Convert subx
[qemu] / target-sparc / translate.c
1 /*
2    SPARC translation
3
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, write to the Free Software
19    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA  02110-1301 USA
20  */
21
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
27
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
33
34 #define GEN_HELPER 1
35 #include "helper.h"
36
37 #define DEBUG_DISAS
38
39 #define DYNAMIC_PC  1 /* dynamic pc value */
40 #define JUMP_PC     2 /* dynamic pc value which takes only one of the two
41                          values recorded in jump_pc[0]/jump_pc[1] */
42
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst, cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
68
69 #include "gen-icount.h"
70
71 typedef struct DisasContext {
72     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
73     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
75     int is_br;
76     int mem_idx;
77     int fpu_enabled;
78     int address_mask_32bit;
79     uint32_t cc_op;  /* current CC operation */
80     struct TranslationBlock *tb;
81     sparc_def_t *def;
82 } DisasContext;
83
84 // This macro uses non-native bit order (bit 0 is the most significant bit)
85 #define GET_FIELD(X, FROM, TO)                                  \
86     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
87
88 // This macro uses the bit order from the manuals, i.e. bit 0 is 2^0 (the LSB)
89 #define GET_FIELD_SP(X, FROM, TO)               \
90     GET_FIELD(X, 31 - (TO), 31 - (FROM))
91
92 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
93 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
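/* Example: the rd field of a format-3 instruction occupies bits 29..25, so
   GET_FIELD(insn, 2, 6) and GET_FIELD_SP(insn, 25, 29) both expand to
   (insn >> 25) & 0x1f.  The ..s variants additionally sign-extend the
   (TO - FROM + 1)-bit result, e.g. GET_FIELDs(insn, 19, 31) for simm13. */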
94
95 #ifdef TARGET_SPARC64
96 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
97 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
98 #else
99 #define DFPREG(r) (r & 0x1e)
100 #define QFPREG(r) (r & 0x1c)
101 #endif
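/* In the SPARC V9 encoding, double/quad FP register numbers 0..62 carry
   bit 5 of the register number in the low bit of the 5-bit field; DFPREG
   and QFPREG undo that folding (e.g. an encoded rd of 1 selects %f32).
   Pre-V9 CPUs only have %f0..%f31, so masking the low bit(s) suffices. */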
102
103 #define UA2005_HTRAP_MASK 0xff
104 #define V8_TRAP_MASK 0x7f
105
106 static int sign_extend(int x, int len)
107 {
108     len = 32 - len;
109     return (x << len) >> len;
110 }
111
112 #define IS_IMM (insn & (1<<13))
113
114 /* floating point register moves */
115 static void gen_op_load_fpr_DT0(unsigned int src)
116 {
117     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
118                    offsetof(CPU_DoubleU, l.upper));
119     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
120                    offsetof(CPU_DoubleU, l.lower));
121 }
122
123 static void gen_op_load_fpr_DT1(unsigned int src)
124 {
125     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
126                    offsetof(CPU_DoubleU, l.upper));
127     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
128                    offsetof(CPU_DoubleU, l.lower));
129 }
130
131 static void gen_op_store_DT0_fpr(unsigned int dst)
132 {
133     tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
134                    offsetof(CPU_DoubleU, l.upper));
135     tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
136                    offsetof(CPU_DoubleU, l.lower));
137 }
138
139 static void gen_op_load_fpr_QT0(unsigned int src)
140 {
141     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
142                    offsetof(CPU_QuadU, l.upmost));
143     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
144                    offsetof(CPU_QuadU, l.upper));
145     tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
146                    offsetof(CPU_QuadU, l.lower));
147     tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
148                    offsetof(CPU_QuadU, l.lowest));
149 }
150
151 static void gen_op_load_fpr_QT1(unsigned int src)
152 {
153     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
154                    offsetof(CPU_QuadU, l.upmost));
155     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
156                    offsetof(CPU_QuadU, l.upper));
157     tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
158                    offsetof(CPU_QuadU, l.lower));
159     tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
160                    offsetof(CPU_QuadU, l.lowest));
161 }
162
163 static void gen_op_store_QT0_fpr(unsigned int dst)
164 {
165     tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
166                    offsetof(CPU_QuadU, l.upmost));
167     tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
168                    offsetof(CPU_QuadU, l.upper));
169     tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
170                    offsetof(CPU_QuadU, l.lower));
171     tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
172                    offsetof(CPU_QuadU, l.lowest));
173 }
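/* dt0/dt1 and qt0/qt1 are staging slots in CPUSPARCState: cpu_fpr[] only
   holds 32-bit halves, so double and quad operands are assembled there for
   the FP helpers and results are copied back via the store_*_fpr
   functions. */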
174
175 /* moves */
176 #ifdef CONFIG_USER_ONLY
177 #define supervisor(dc) 0
178 #ifdef TARGET_SPARC64
179 #define hypervisor(dc) 0
180 #endif
181 #else
182 #define supervisor(dc) (dc->mem_idx >= 1)
183 #ifdef TARGET_SPARC64
184 #define hypervisor(dc) (dc->mem_idx == 2)
185 #else
186 #endif
187 #endif
188
189 #ifdef TARGET_SPARC64
190 #ifndef TARGET_ABI32
191 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
192 #else
193 #define AM_CHECK(dc) (1)
194 #endif
195 #endif
196
197 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
198 {
199 #ifdef TARGET_SPARC64
200     if (AM_CHECK(dc))
201         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
202 #endif
203 }
204
205 static inline void gen_movl_reg_TN(int reg, TCGv tn)
206 {
207     if (reg == 0)
208         tcg_gen_movi_tl(tn, 0);
209     else if (reg < 8)
210         tcg_gen_mov_tl(tn, cpu_gregs[reg]);
211     else {
212         tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
213     }
214 }
215
216 static inline void gen_movl_TN_reg(int reg, TCGv tn)
217 {
218     if (reg == 0)
219         return;
220     else if (reg < 8)
221         tcg_gen_mov_tl(cpu_gregs[reg], tn);
222     else {
223         tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
224     }
225 }
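/* %g0 reads as zero and writes to it are dropped, %g1..%g7 live in the
   cpu_gregs[] TCG globals, and the windowed registers (%o/%l/%i, i.e.
   regs 8..31) are accessed through cpu_regwptr, which points into the
   current register window. */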
226
227 static inline void gen_goto_tb(DisasContext *s, int tb_num,
228                                target_ulong pc, target_ulong npc)
229 {
230     TranslationBlock *tb;
231
232     tb = s->tb;
233     if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
234         (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK))  {
235         /* jump to same page: we can use a direct jump */
236         tcg_gen_goto_tb(tb_num);
237         tcg_gen_movi_tl(cpu_pc, pc);
238         tcg_gen_movi_tl(cpu_npc, npc);
239         tcg_gen_exit_tb((long)tb + tb_num);
240     } else {
241         /* jump to another page: currently not optimized */
242         tcg_gen_movi_tl(cpu_pc, pc);
243         tcg_gen_movi_tl(cpu_npc, npc);
244         tcg_gen_exit_tb(0);
245     }
246 }
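/* Direct (chainable) jumps are only emitted when both pc and npc stay on
   the same guest page as this TB, so the link stays valid if other pages
   are remapped; tcg_gen_exit_tb((long)tb + tb_num) returns the TB pointer
   tagged with the jump slot so the caller can patch the chain.  Cross-page
   jumps simply write back pc/npc and exit. */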
247
248 // XXX suboptimal
249 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
250 {
251     tcg_gen_extu_i32_tl(reg, src);
252     tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
253     tcg_gen_andi_tl(reg, reg, 0x1);
254 }
255
256 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
257 {
258     tcg_gen_extu_i32_tl(reg, src);
259     tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
260     tcg_gen_andi_tl(reg, reg, 0x1);
261 }
262
263 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
264 {
265     tcg_gen_extu_i32_tl(reg, src);
266     tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
267     tcg_gen_andi_tl(reg, reg, 0x1);
268 }
269
270 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
271 {
272     tcg_gen_extu_i32_tl(reg, src);
273     tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
274     tcg_gen_andi_tl(reg, reg, 0x1);
275 }
276
277 static inline void gen_cc_clear_icc(void)
278 {
279     tcg_gen_movi_i32(cpu_psr, 0);
280 }
281
282 #ifdef TARGET_SPARC64
283 static inline void gen_cc_clear_xcc(void)
284 {
285     tcg_gen_movi_i32(cpu_xcc, 0);
286 }
287 #endif
288
289 /* old op:
290     if (!T0)
291         env->psr |= PSR_ZERO;
292     if ((int32_t) T0 < 0)
293         env->psr |= PSR_NEG;
294 */
295 static inline void gen_cc_NZ_icc(TCGv dst)
296 {
297     TCGv r_temp;
298     int l1, l2;
299
300     l1 = gen_new_label();
301     l2 = gen_new_label();
302     r_temp = tcg_temp_new();
303     tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
304     tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
305     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
306     gen_set_label(l1);
307     tcg_gen_ext32s_tl(r_temp, dst);
308     tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
309     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
310     gen_set_label(l2);
311     tcg_temp_free(r_temp);
312 }
313
314 #ifdef TARGET_SPARC64
315 static inline void gen_cc_NZ_xcc(TCGv dst)
316 {
317     int l1, l2;
318
319     l1 = gen_new_label();
320     l2 = gen_new_label();
321     tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
322     tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
323     gen_set_label(l1);
324     tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
325     tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
326     gen_set_label(l2);
327 }
328 #endif
329
330 /* old op:
331     if (T0 < src1)
332         env->psr |= PSR_CARRY;
333 */
334 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
335 {
336     TCGv r_temp1, r_temp2;
337     int l1;
338
339     l1 = gen_new_label();
340     r_temp1 = tcg_temp_new();
341     r_temp2 = tcg_temp_new();
342     tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
343     tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
344     tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
345     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
346     gen_set_label(l1);
347     tcg_temp_free(r_temp1);
348     tcg_temp_free(r_temp2);
349 }
350
351 #ifdef TARGET_SPARC64
352 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
353 {
354     int l1;
355
356     l1 = gen_new_label();
357     tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
358     tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
359     gen_set_label(l1);
360 }
361 #endif
362
363 /* old op:
364     if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
365         env->psr |= PSR_OVF;
366 */
367 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
368 {
369     TCGv r_temp;
370
371     r_temp = tcg_temp_new();
372     tcg_gen_xor_tl(r_temp, src1, src2);
373     tcg_gen_not_tl(r_temp, r_temp);
374     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
375     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
376     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
377     tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
378     tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
379     tcg_temp_free(r_temp);
380     tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
381 }
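/* Signed overflow on addition: set V when the operands have the same sign
   but the result's sign differs, i.e. roughly
       V = ((~(src1 ^ src2)) & (src1 ^ dst)) >> 31;
   the bit is then shifted into the PSR_OVF position and OR-ed in. */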
382
383 #ifdef TARGET_SPARC64
384 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
385 {
386     TCGv r_temp;
387
388     r_temp = tcg_temp_new();
389     tcg_gen_xor_tl(r_temp, src1, src2);
390     tcg_gen_not_tl(r_temp, r_temp);
391     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
392     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
393     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
394     tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
395     tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
396     tcg_temp_free(r_temp);
397     tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
398 }
399 #endif
400
401 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
402 {
403     TCGv r_temp;
404     TCGv_i32 r_const;
405     int l1;
406
407     l1 = gen_new_label();
408
409     r_temp = tcg_temp_new();
410     tcg_gen_xor_tl(r_temp, src1, src2);
411     tcg_gen_not_tl(r_temp, r_temp);
412     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
413     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
414     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
415     tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
416     r_const = tcg_const_i32(TT_TOVF);
417     gen_helper_raise_exception(r_const);
418     tcg_temp_free_i32(r_const);
419     gen_set_label(l1);
420     tcg_temp_free(r_temp);
421 }
422
423 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
424 {
425     int l1;
426
427     l1 = gen_new_label();
428     tcg_gen_or_tl(cpu_tmp0, src1, src2);
429     tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
430     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
431     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
432     gen_set_label(l1);
433 }
434
435 static inline void gen_tag_tv(TCGv src1, TCGv src2)
436 {
437     int l1;
438     TCGv_i32 r_const;
439
440     l1 = gen_new_label();
441     tcg_gen_or_tl(cpu_tmp0, src1, src2);
442     tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
443     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
444     r_const = tcg_const_i32(TT_TOVF);
445     gen_helper_raise_exception(r_const);
446     tcg_temp_free_i32(r_const);
447     gen_set_label(l1);
448 }
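/* Tagged arithmetic: TADDcc/TSUBcc require the two low-order tag bits of
   both operands to be clear; otherwise the non-trapping forms set PSR_OVF
   (gen_cc_V_tag) while TADDccTV/TSUBccTV raise a tag-overflow trap,
   TT_TOVF (gen_tag_tv, gen_add_tv). */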
449
450 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
451 {
452     tcg_gen_mov_tl(cpu_cc_src, src1);
453     tcg_gen_movi_tl(cpu_cc_src2, src2);
454     tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
455     tcg_gen_mov_tl(dst, cpu_cc_dst);
456 }
457
458 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
459 {
460     tcg_gen_mov_tl(cpu_cc_src, src1);
461     tcg_gen_mov_tl(cpu_cc_src2, src2);
462     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
463     tcg_gen_mov_tl(dst, cpu_cc_dst);
464 }
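/* For plain ADD/SUB the flags are computed lazily: the operands and result
   are kept in cpu_cc_src/cpu_cc_src2/cpu_cc_dst and the PSR bits are only
   materialized by gen_helper_compute_psr() when a consumer needs them (see
   gen_cond); ops not yet converted to the CC_OP_* scheme still update the
   flags eagerly with the gen_cc_* helpers above. */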
465
466 static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
467 {
468     tcg_gen_mov_tl(cpu_cc_src, src1);
469     tcg_gen_movi_tl(cpu_cc_src2, src2);
470     gen_mov_reg_C(cpu_tmp0, cpu_psr);
471     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
472     tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
473     tcg_gen_mov_tl(dst, cpu_cc_dst);
474 }
475
476 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
477 {
478     tcg_gen_mov_tl(cpu_cc_src, src1);
479     tcg_gen_mov_tl(cpu_cc_src2, src2);
480     gen_mov_reg_C(cpu_tmp0, cpu_psr);
481     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
482     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
483     tcg_gen_mov_tl(dst, cpu_cc_dst);
484 }
485
486 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
487 {
488     tcg_gen_mov_tl(cpu_cc_src, src1);
489     tcg_gen_mov_tl(cpu_cc_src2, src2);
490     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
491     gen_cc_clear_icc();
492     gen_cc_NZ_icc(cpu_cc_dst);
493     gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
494     gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
495     gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
496 #ifdef TARGET_SPARC64
497     gen_cc_clear_xcc();
498     gen_cc_NZ_xcc(cpu_cc_dst);
499     gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
500     gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
501 #endif
502     tcg_gen_mov_tl(dst, cpu_cc_dst);
503 }
504
505 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
506 {
507     tcg_gen_mov_tl(cpu_cc_src, src1);
508     tcg_gen_mov_tl(cpu_cc_src2, src2);
509     gen_tag_tv(cpu_cc_src, cpu_cc_src2);
510     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
511     gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
512     gen_cc_clear_icc();
513     gen_cc_NZ_icc(cpu_cc_dst);
514     gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
515 #ifdef TARGET_SPARC64
516     gen_cc_clear_xcc();
517     gen_cc_NZ_xcc(cpu_cc_dst);
518     gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
519     gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
520 #endif
521     tcg_gen_mov_tl(dst, cpu_cc_dst);
522 }
523
524 /* old op:
525     if (src1 < T1)
526         env->psr |= PSR_CARRY;
527 */
528 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
529 {
530     TCGv r_temp1, r_temp2;
531     int l1;
532
533     l1 = gen_new_label();
534     r_temp1 = tcg_temp_new();
535     r_temp2 = tcg_temp_new();
536     tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
537     tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
538     tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
539     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
540     gen_set_label(l1);
541     tcg_temp_free(r_temp1);
542     tcg_temp_free(r_temp2);
543 }
544
545 #ifdef TARGET_SPARC64
546 static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
547 {
548     int l1;
549
550     l1 = gen_new_label();
551     tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
552     tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
553     gen_set_label(l1);
554 }
555 #endif
556
557 /* old op:
558     if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
559         env->psr |= PSR_OVF;
560 */
561 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
562 {
563     TCGv r_temp;
564
565     r_temp = tcg_temp_new();
566     tcg_gen_xor_tl(r_temp, src1, src2);
567     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
568     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
569     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
570     tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
571     tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
572     tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
573     tcg_temp_free(r_temp);
574 }
575
576 #ifdef TARGET_SPARC64
577 static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
578 {
579     TCGv r_temp;
580
581     r_temp = tcg_temp_new();
582     tcg_gen_xor_tl(r_temp, src1, src2);
583     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
584     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
585     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
586     tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
587     tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
588     tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
589     tcg_temp_free(r_temp);
590 }
591 #endif
592
593 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
594 {
595     TCGv r_temp;
596     TCGv_i32 r_const;
597     int l1;
598
599     l1 = gen_new_label();
600
601     r_temp = tcg_temp_new();
602     tcg_gen_xor_tl(r_temp, src1, src2);
603     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
604     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
605     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
606     tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
607     r_const = tcg_const_i32(TT_TOVF);
608     gen_helper_raise_exception(r_const);
609     tcg_temp_free_i32(r_const);
610     gen_set_label(l1);
611     tcg_temp_free(r_temp);
612 }
613
614 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
615 {
616     tcg_gen_mov_tl(cpu_cc_src, src1);
617     tcg_gen_movi_tl(cpu_cc_src2, src2);
618     if (src2 == 0) {
619         tcg_gen_mov_tl(cpu_cc_dst, src1);
620         tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
621         dc->cc_op = CC_OP_LOGIC;
622     } else {
623         tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
624         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
625         dc->cc_op = CC_OP_SUB;
626     }
627     tcg_gen_mov_tl(dst, cpu_cc_dst);
628 }
629
630 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
631 {
632     tcg_gen_mov_tl(cpu_cc_src, src1);
633     tcg_gen_mov_tl(cpu_cc_src2, src2);
634     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
635     tcg_gen_mov_tl(dst, cpu_cc_dst);
636 }
637
638 static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
639 {
640     tcg_gen_mov_tl(cpu_cc_src, src1);
641     tcg_gen_movi_tl(cpu_cc_src2, src2);
642     gen_mov_reg_C(cpu_tmp0, cpu_psr);
643     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
644     tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
645     tcg_gen_mov_tl(dst, cpu_cc_dst);
646 }
647
648 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
649 {
650     tcg_gen_mov_tl(cpu_cc_src, src1);
651     tcg_gen_mov_tl(cpu_cc_src2, src2);
652     gen_mov_reg_C(cpu_tmp0, cpu_psr);
653     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
654     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
655     tcg_gen_mov_tl(dst, cpu_cc_dst);
656 }
657
658 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
659 {
660     tcg_gen_mov_tl(cpu_cc_src, src1);
661     tcg_gen_mov_tl(cpu_cc_src2, src2);
662     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
663     gen_cc_clear_icc();
664     gen_cc_NZ_icc(cpu_cc_dst);
665     gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
666     gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
667     gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
668 #ifdef TARGET_SPARC64
669     gen_cc_clear_xcc();
670     gen_cc_NZ_xcc(cpu_cc_dst);
671     gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
672     gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
673 #endif
674     tcg_gen_mov_tl(dst, cpu_cc_dst);
675 }
676
677 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
678 {
679     tcg_gen_mov_tl(cpu_cc_src, src1);
680     tcg_gen_mov_tl(cpu_cc_src2, src2);
681     gen_tag_tv(cpu_cc_src, cpu_cc_src2);
682     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
683     gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
684     gen_cc_clear_icc();
685     gen_cc_NZ_icc(cpu_cc_dst);
686     gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
687 #ifdef TARGET_SPARC64
688     gen_cc_clear_xcc();
689     gen_cc_NZ_xcc(cpu_cc_dst);
690     gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
691     gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
692 #endif
693     tcg_gen_mov_tl(dst, cpu_cc_dst);
694 }
695
696 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
697 {
698     TCGv r_temp;
699     int l1;
700
701     l1 = gen_new_label();
702     r_temp = tcg_temp_new();
703
704     /* old op:
705     if (!(env->y & 1))
706         T1 = 0;
707     */
708     tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
709     tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
710     tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
711     tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
712     tcg_gen_movi_tl(cpu_cc_src2, 0);
713     gen_set_label(l1);
714
715     // b2 = T0 & 1;
716     // env->y = (b2 << 31) | (env->y >> 1);
717     tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
718     tcg_gen_shli_tl(r_temp, r_temp, 31);
719     tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
720     tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
721     tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
722     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
723
724     // b1 = N ^ V;
725     gen_mov_reg_N(cpu_tmp0, cpu_psr);
726     gen_mov_reg_V(r_temp, cpu_psr);
727     tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
728     tcg_temp_free(r_temp);
729
730     // T0 = (b1 << 31) | (T0 >> 1);
731     // src1 = T0;
732     tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
733     tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
734     tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
735
736     /* do addition and update flags */
737     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
738
739     gen_cc_clear_icc();
740     gen_cc_NZ_icc(cpu_cc_dst);
741     gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
742     gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
743     tcg_gen_mov_tl(dst, cpu_cc_dst);
744 }
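/* MULScc: one step of a 32x32 multiply (SPARC V8).  rs2 is added only if
   the low bit of %y is set, %y shifts right with the low bit of rs1
   entering at the top, and the rs1 operand shifts right with (N xor V)
   entering bit 31 before the add updates the icc flags. */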
745
746 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
747 {
748     TCGv_i64 r_temp, r_temp2;
749
750     r_temp = tcg_temp_new_i64();
751     r_temp2 = tcg_temp_new_i64();
752
753     tcg_gen_extu_tl_i64(r_temp, src2);
754     tcg_gen_extu_tl_i64(r_temp2, src1);
755     tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
756
757     tcg_gen_shri_i64(r_temp, r_temp2, 32);
758     tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
759     tcg_temp_free_i64(r_temp);
760     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
761 #ifdef TARGET_SPARC64
762     tcg_gen_mov_i64(dst, r_temp2);
763 #else
764     tcg_gen_trunc_i64_tl(dst, r_temp2);
765 #endif
766     tcg_temp_free_i64(r_temp2);
767 }
768
769 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
770 {
771     TCGv_i64 r_temp, r_temp2;
772
773     r_temp = tcg_temp_new_i64();
774     r_temp2 = tcg_temp_new_i64();
775
776     tcg_gen_ext_tl_i64(r_temp, src2);
777     tcg_gen_ext_tl_i64(r_temp2, src1);
778     tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
779
780     tcg_gen_shri_i64(r_temp, r_temp2, 32);
781     tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
782     tcg_temp_free_i64(r_temp);
783     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
784 #ifdef TARGET_SPARC64
785     tcg_gen_mov_i64(dst, r_temp2);
786 #else
787     tcg_gen_trunc_i64_tl(dst, r_temp2);
788 #endif
789     tcg_temp_free_i64(r_temp2);
790 }
791
792 #ifdef TARGET_SPARC64
793 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
794 {
795     TCGv_i32 r_const;
796     int l1;
797
798     l1 = gen_new_label();
799     tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
800     r_const = tcg_const_i32(TT_DIV_ZERO);
801     gen_helper_raise_exception(r_const);
802     tcg_temp_free_i32(r_const);
803     gen_set_label(l1);
804 }
805
806 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
807 {
808     int l1, l2;
809
810     l1 = gen_new_label();
811     l2 = gen_new_label();
812     tcg_gen_mov_tl(cpu_cc_src, src1);
813     tcg_gen_mov_tl(cpu_cc_src2, src2);
814     gen_trap_ifdivzero_tl(cpu_cc_src2);
815     tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
816     tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
817     tcg_gen_movi_i64(dst, INT64_MIN);
818     tcg_gen_br(l2);
819     gen_set_label(l1);
820     tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
821     gen_set_label(l2);
822 }
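/* SDIVX corner cases: a zero divisor traps with TT_DIV_ZERO, and
   INT64_MIN / -1 (the only signed overflow) is forced to INT64_MIN rather
   than being handed to the host divide instruction. */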
823 #endif
824
825 static inline void gen_op_div_cc(TCGv dst)
826 {
827     int l1;
828
829     tcg_gen_mov_tl(cpu_cc_dst, dst);
830     gen_cc_clear_icc();
831     gen_cc_NZ_icc(cpu_cc_dst);
832     l1 = gen_new_label();
833     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
834     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
835     gen_set_label(l1);
836 }
837
838 // 1
839 static inline void gen_op_eval_ba(TCGv dst)
840 {
841     tcg_gen_movi_tl(dst, 1);
842 }
843
844 // Z
845 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
846 {
847     gen_mov_reg_Z(dst, src);
848 }
849
850 // Z | (N ^ V)
851 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
852 {
853     gen_mov_reg_N(cpu_tmp0, src);
854     gen_mov_reg_V(dst, src);
855     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
856     gen_mov_reg_Z(cpu_tmp0, src);
857     tcg_gen_or_tl(dst, dst, cpu_tmp0);
858 }
859
860 // N ^ V
861 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
862 {
863     gen_mov_reg_V(cpu_tmp0, src);
864     gen_mov_reg_N(dst, src);
865     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
866 }
867
868 // C | Z
869 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
870 {
871     gen_mov_reg_Z(cpu_tmp0, src);
872     gen_mov_reg_C(dst, src);
873     tcg_gen_or_tl(dst, dst, cpu_tmp0);
874 }
875
876 // C
877 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
878 {
879     gen_mov_reg_C(dst, src);
880 }
881
882 // V
883 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
884 {
885     gen_mov_reg_V(dst, src);
886 }
887
888 // 0
889 static inline void gen_op_eval_bn(TCGv dst)
890 {
891     tcg_gen_movi_tl(dst, 0);
892 }
893
894 // N
895 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
896 {
897     gen_mov_reg_N(dst, src);
898 }
899
900 // !Z
901 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
902 {
903     gen_mov_reg_Z(dst, src);
904     tcg_gen_xori_tl(dst, dst, 0x1);
905 }
906
907 // !(Z | (N ^ V))
908 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
909 {
910     gen_mov_reg_N(cpu_tmp0, src);
911     gen_mov_reg_V(dst, src);
912     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
913     gen_mov_reg_Z(cpu_tmp0, src);
914     tcg_gen_or_tl(dst, dst, cpu_tmp0);
915     tcg_gen_xori_tl(dst, dst, 0x1);
916 }
917
918 // !(N ^ V)
919 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
920 {
921     gen_mov_reg_V(cpu_tmp0, src);
922     gen_mov_reg_N(dst, src);
923     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
924     tcg_gen_xori_tl(dst, dst, 0x1);
925 }
926
927 // !(C | Z)
928 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
929 {
930     gen_mov_reg_Z(cpu_tmp0, src);
931     gen_mov_reg_C(dst, src);
932     tcg_gen_or_tl(dst, dst, cpu_tmp0);
933     tcg_gen_xori_tl(dst, dst, 0x1);
934 }
935
936 // !C
937 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
938 {
939     gen_mov_reg_C(dst, src);
940     tcg_gen_xori_tl(dst, dst, 0x1);
941 }
942
943 // !N
944 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
945 {
946     gen_mov_reg_N(dst, src);
947     tcg_gen_xori_tl(dst, dst, 0x1);
948 }
949
950 // !V
951 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
952 {
953     gen_mov_reg_V(dst, src);
954     tcg_gen_xori_tl(dst, dst, 0x1);
955 }
956
957 /*
958   FPSR bit field FCC1 | FCC0:
959    0 =
960    1 <
961    2 >
962    3 unordered
963 */
964 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
965                                     unsigned int fcc_offset)
966 {
967     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
968     tcg_gen_andi_tl(reg, reg, 0x1);
969 }
970
971 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
972                                     unsigned int fcc_offset)
973 {
974     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
975     tcg_gen_andi_tl(reg, reg, 0x1);
976 }
977
978 // !0: FCC0 | FCC1
979 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
980                                     unsigned int fcc_offset)
981 {
982     gen_mov_reg_FCC0(dst, src, fcc_offset);
983     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
984     tcg_gen_or_tl(dst, dst, cpu_tmp0);
985 }
986
987 // 1 or 2: FCC0 ^ FCC1
988 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
989                                     unsigned int fcc_offset)
990 {
991     gen_mov_reg_FCC0(dst, src, fcc_offset);
992     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
993     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
994 }
995
996 // 1 or 3: FCC0
997 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
998                                     unsigned int fcc_offset)
999 {
1000     gen_mov_reg_FCC0(dst, src, fcc_offset);
1001 }
1002
1003 // 1: FCC0 & !FCC1
1004 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1005                                     unsigned int fcc_offset)
1006 {
1007     gen_mov_reg_FCC0(dst, src, fcc_offset);
1008     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1009     tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1010     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1011 }
1012
1013 // 2 or 3: FCC1
1014 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1015                                     unsigned int fcc_offset)
1016 {
1017     gen_mov_reg_FCC1(dst, src, fcc_offset);
1018 }
1019
1020 // 2: !FCC0 & FCC1
1021 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1022                                     unsigned int fcc_offset)
1023 {
1024     gen_mov_reg_FCC0(dst, src, fcc_offset);
1025     tcg_gen_xori_tl(dst, dst, 0x1);
1026     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1027     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1028 }
1029
1030 // 3: FCC0 & FCC1
1031 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1032                                     unsigned int fcc_offset)
1033 {
1034     gen_mov_reg_FCC0(dst, src, fcc_offset);
1035     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1036     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1037 }
1038
1039 // 0: !(FCC0 | FCC1)
1040 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1041                                     unsigned int fcc_offset)
1042 {
1043     gen_mov_reg_FCC0(dst, src, fcc_offset);
1044     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1045     tcg_gen_or_tl(dst, dst, cpu_tmp0);
1046     tcg_gen_xori_tl(dst, dst, 0x1);
1047 }
1048
1049 // 0 or 3: !(FCC0 ^ FCC1)
1050 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1051                                     unsigned int fcc_offset)
1052 {
1053     gen_mov_reg_FCC0(dst, src, fcc_offset);
1054     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1055     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1056     tcg_gen_xori_tl(dst, dst, 0x1);
1057 }
1058
1059 // 0 or 2: !FCC0
1060 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1061                                     unsigned int fcc_offset)
1062 {
1063     gen_mov_reg_FCC0(dst, src, fcc_offset);
1064     tcg_gen_xori_tl(dst, dst, 0x1);
1065 }
1066
1067 // !1: !(FCC0 & !FCC1)
1068 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1069                                     unsigned int fcc_offset)
1070 {
1071     gen_mov_reg_FCC0(dst, src, fcc_offset);
1072     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1073     tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1074     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1075     tcg_gen_xori_tl(dst, dst, 0x1);
1076 }
1077
1078 // 0 or 1: !FCC1
1079 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1080                                     unsigned int fcc_offset)
1081 {
1082     gen_mov_reg_FCC1(dst, src, fcc_offset);
1083     tcg_gen_xori_tl(dst, dst, 0x1);
1084 }
1085
1086 // !2: !(!FCC0 & FCC1)
1087 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1088                                     unsigned int fcc_offset)
1089 {
1090     gen_mov_reg_FCC0(dst, src, fcc_offset);
1091     tcg_gen_xori_tl(dst, dst, 0x1);
1092     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1093     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1094     tcg_gen_xori_tl(dst, dst, 0x1);
1095 }
1096
1097 // !3: !(FCC0 & FCC1)
1098 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1099                                     unsigned int fcc_offset)
1100 {
1101     gen_mov_reg_FCC0(dst, src, fcc_offset);
1102     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1103     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1104     tcg_gen_xori_tl(dst, dst, 0x1);
1105 }
1106
1107 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1108                                target_ulong pc2, TCGv r_cond)
1109 {
1110     int l1;
1111
1112     l1 = gen_new_label();
1113
1114     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1115
1116     gen_goto_tb(dc, 0, pc1, pc1 + 4);
1117
1118     gen_set_label(l1);
1119     gen_goto_tb(dc, 1, pc2, pc2 + 4);
1120 }
1121
1122 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1123                                 target_ulong pc2, TCGv r_cond)
1124 {
1125     int l1;
1126
1127     l1 = gen_new_label();
1128
1129     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1130
1131     gen_goto_tb(dc, 0, pc2, pc1);
1132
1133     gen_set_label(l1);
1134     gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1135 }
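/* Annulling branch: when taken, execution flows through the delay slot
   (pc = pc2) on its way to the target pc1; when not taken, the a-bit
   annuls the delay slot, so control resumes at pc2 + 4. */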
1136
1137 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1138                                       TCGv r_cond)
1139 {
1140     int l1, l2;
1141
1142     l1 = gen_new_label();
1143     l2 = gen_new_label();
1144
1145     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1146
1147     tcg_gen_movi_tl(cpu_npc, npc1);
1148     tcg_gen_br(l2);
1149
1150     gen_set_label(l1);
1151     tcg_gen_movi_tl(cpu_npc, npc2);
1152     gen_set_label(l2);
1153 }
1154
1155 /* call this function before using the condition register as it may
1156    have been set for a jump */
1157 static inline void flush_cond(DisasContext *dc, TCGv cond)
1158 {
1159     if (dc->npc == JUMP_PC) {
1160         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1161         dc->npc = DYNAMIC_PC;
1162     }
1163 }
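/* npc == JUMP_PC means the next PC is one of two statically known values
   selected by the condition left in cpu_cond; gen_generic_branch() turns
   that into an actual write of cpu_npc (jump_pc[0] if the condition is
   non-zero, jump_pc[1] otherwise), after which npc must be treated as
   DYNAMIC_PC. */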
1164
1165 static inline void save_npc(DisasContext *dc, TCGv cond)
1166 {
1167     if (dc->npc == JUMP_PC) {
1168         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1169         dc->npc = DYNAMIC_PC;
1170     } else if (dc->npc != DYNAMIC_PC) {
1171         tcg_gen_movi_tl(cpu_npc, dc->npc);
1172     }
1173 }
1174
1175 static inline void save_state(DisasContext *dc, TCGv cond)
1176 {
1177     tcg_gen_movi_tl(cpu_pc, dc->pc);
1178     save_npc(dc, cond);
1179 }
1180
1181 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1182 {
1183     if (dc->npc == JUMP_PC) {
1184         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1185         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1186         dc->pc = DYNAMIC_PC;
1187     } else if (dc->npc == DYNAMIC_PC) {
1188         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1189         dc->pc = DYNAMIC_PC;
1190     } else {
1191         dc->pc = dc->npc;
1192     }
1193 }
1194
1195 static inline void gen_op_next_insn(void)
1196 {
1197     tcg_gen_mov_tl(cpu_pc, cpu_npc);
1198     tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1199 }
1200
1201 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1202                             DisasContext *dc)
1203 {
1204     TCGv_i32 r_src;
1205
1206 #ifdef TARGET_SPARC64
1207     if (cc)
1208         r_src = cpu_xcc;
1209     else
1210         r_src = cpu_psr;
1211 #else
1212     r_src = cpu_psr;
1213 #endif
1214     switch (dc->cc_op) {
1215     case CC_OP_FLAGS:
1216         break;
1217     default:
1218         gen_helper_compute_psr();
1219         dc->cc_op = CC_OP_FLAGS;
1220         break;
1221     }
1222     switch (cond) {
1223     case 0x0:
1224         gen_op_eval_bn(r_dst);
1225         break;
1226     case 0x1:
1227         gen_op_eval_be(r_dst, r_src);
1228         break;
1229     case 0x2:
1230         gen_op_eval_ble(r_dst, r_src);
1231         break;
1232     case 0x3:
1233         gen_op_eval_bl(r_dst, r_src);
1234         break;
1235     case 0x4:
1236         gen_op_eval_bleu(r_dst, r_src);
1237         break;
1238     case 0x5:
1239         gen_op_eval_bcs(r_dst, r_src);
1240         break;
1241     case 0x6:
1242         gen_op_eval_bneg(r_dst, r_src);
1243         break;
1244     case 0x7:
1245         gen_op_eval_bvs(r_dst, r_src);
1246         break;
1247     case 0x8:
1248         gen_op_eval_ba(r_dst);
1249         break;
1250     case 0x9:
1251         gen_op_eval_bne(r_dst, r_src);
1252         break;
1253     case 0xa:
1254         gen_op_eval_bg(r_dst, r_src);
1255         break;
1256     case 0xb:
1257         gen_op_eval_bge(r_dst, r_src);
1258         break;
1259     case 0xc:
1260         gen_op_eval_bgu(r_dst, r_src);
1261         break;
1262     case 0xd:
1263         gen_op_eval_bcc(r_dst, r_src);
1264         break;
1265     case 0xe:
1266         gen_op_eval_bpos(r_dst, r_src);
1267         break;
1268     case 0xf:
1269         gen_op_eval_bvc(r_dst, r_src);
1270         break;
1271     }
1272 }
1273
1274 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1275 {
1276     unsigned int offset;
1277
1278     switch (cc) {
1279     default:
1280     case 0x0:
1281         offset = 0;
1282         break;
1283     case 0x1:
1284         offset = 32 - 10;
1285         break;
1286     case 0x2:
1287         offset = 34 - 10;
1288         break;
1289     case 0x3:
1290         offset = 36 - 10;
1291         break;
1292     }
1293
1294     switch (cond) {
1295     case 0x0:
1296         gen_op_eval_bn(r_dst);
1297         break;
1298     case 0x1:
1299         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1300         break;
1301     case 0x2:
1302         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1303         break;
1304     case 0x3:
1305         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1306         break;
1307     case 0x4:
1308         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1309         break;
1310     case 0x5:
1311         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1312         break;
1313     case 0x6:
1314         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1315         break;
1316     case 0x7:
1317         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1318         break;
1319     case 0x8:
1320         gen_op_eval_ba(r_dst);
1321         break;
1322     case 0x9:
1323         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1324         break;
1325     case 0xa:
1326         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1327         break;
1328     case 0xb:
1329         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1330         break;
1331     case 0xc:
1332         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1333         break;
1334     case 0xd:
1335         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1336         break;
1337     case 0xe:
1338         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1339         break;
1340     case 0xf:
1341         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1342         break;
1343     }
1344 }
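/* The offsets 22, 24 and 26 relocate the FCC0/FCC1 extraction from the
   fcc0 field at FSR bits 11:10 to the V9-only fcc1/fcc2/fcc3 fields at
   FSR bits 33:32, 35:34 and 37:36. */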
1345
1346 #ifdef TARGET_SPARC64
1347 // Inverted logic
1348 static const int gen_tcg_cond_reg[8] = {
1349     -1,
1350     TCG_COND_NE,
1351     TCG_COND_GT,
1352     TCG_COND_GE,
1353     -1,
1354     TCG_COND_EQ,
1355     TCG_COND_LE,
1356     TCG_COND_LT,
1357 };
1358
1359 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1360 {
1361     int l1;
1362
1363     l1 = gen_new_label();
1364     tcg_gen_movi_tl(r_dst, 0);
1365     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1366     tcg_gen_movi_tl(r_dst, 1);
1367     gen_set_label(l1);
1368 }
1369 #endif
1370
1371 /* XXX: potentially incorrect if dynamic npc */
1372 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1373                       TCGv r_cond)
1374 {
1375     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1376     target_ulong target = dc->pc + offset;
1377
1378     if (cond == 0x0) {
1379         /* unconditional not taken */
1380         if (a) {
1381             dc->pc = dc->npc + 4;
1382             dc->npc = dc->pc + 4;
1383         } else {
1384             dc->pc = dc->npc;
1385             dc->npc = dc->pc + 4;
1386         }
1387     } else if (cond == 0x8) {
1388         /* unconditional taken */
1389         if (a) {
1390             dc->pc = target;
1391             dc->npc = dc->pc + 4;
1392         } else {
1393             dc->pc = dc->npc;
1394             dc->npc = target;
1395         }
1396     } else {
1397         flush_cond(dc, r_cond);
1398         gen_cond(r_cond, cc, cond, dc);
1399         if (a) {
1400             gen_branch_a(dc, target, dc->npc, r_cond);
1401             dc->is_br = 1;
1402         } else {
1403             dc->pc = dc->npc;
1404             dc->jump_pc[0] = target;
1405             dc->jump_pc[1] = dc->npc + 4;
1406             dc->npc = JUMP_PC;
1407         }
1408     }
1409 }
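/* Branch decoding: cond 0 (BN) and 8 (BA) are resolved at translation
   time, the a-bit only deciding whether the delay slot is annulled.  For
   the other conditions the predicate is computed into r_cond and either
   both paths are emitted now (annulling form, which ends the TB) or the
   decision is deferred by recording both candidate targets in jump_pc[]
   and setting npc = JUMP_PC.  do_fbranch below follows the same pattern
   with the FCC conditions. */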
1410
1411 /* XXX: potentially incorrect if dynamic npc */
1412 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1413                       TCGv r_cond)
1414 {
1415     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1416     target_ulong target = dc->pc + offset;
1417
1418     if (cond == 0x0) {
1419         /* unconditional not taken */
1420         if (a) {
1421             dc->pc = dc->npc + 4;
1422             dc->npc = dc->pc + 4;
1423         } else {
1424             dc->pc = dc->npc;
1425             dc->npc = dc->pc + 4;
1426         }
1427     } else if (cond == 0x8) {
1428         /* unconditional taken */
1429         if (a) {
1430             dc->pc = target;
1431             dc->npc = dc->pc + 4;
1432         } else {
1433             dc->pc = dc->npc;
1434             dc->npc = target;
1435         }
1436     } else {
1437         flush_cond(dc, r_cond);
1438         gen_fcond(r_cond, cc, cond);
1439         if (a) {
1440             gen_branch_a(dc, target, dc->npc, r_cond);
1441             dc->is_br = 1;
1442         } else {
1443             dc->pc = dc->npc;
1444             dc->jump_pc[0] = target;
1445             dc->jump_pc[1] = dc->npc + 4;
1446             dc->npc = JUMP_PC;
1447         }
1448     }
1449 }
1450
1451 #ifdef TARGET_SPARC64
1452 /* XXX: potentially incorrect if dynamic npc */
1453 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1454                           TCGv r_cond, TCGv r_reg)
1455 {
1456     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1457     target_ulong target = dc->pc + offset;
1458
1459     flush_cond(dc, r_cond);
1460     gen_cond_reg(r_cond, cond, r_reg);
1461     if (a) {
1462         gen_branch_a(dc, target, dc->npc, r_cond);
1463         dc->is_br = 1;
1464     } else {
1465         dc->pc = dc->npc;
1466         dc->jump_pc[0] = target;
1467         dc->jump_pc[1] = dc->npc + 4;
1468         dc->npc = JUMP_PC;
1469     }
1470 }
1471
1472 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1473 {
1474     switch (fccno) {
1475     case 0:
1476         gen_helper_fcmps(r_rs1, r_rs2);
1477         break;
1478     case 1:
1479         gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1480         break;
1481     case 2:
1482         gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1483         break;
1484     case 3:
1485         gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1486         break;
1487     }
1488 }
1489
1490 static inline void gen_op_fcmpd(int fccno)
1491 {
1492     switch (fccno) {
1493     case 0:
1494         gen_helper_fcmpd();
1495         break;
1496     case 1:
1497         gen_helper_fcmpd_fcc1();
1498         break;
1499     case 2:
1500         gen_helper_fcmpd_fcc2();
1501         break;
1502     case 3:
1503         gen_helper_fcmpd_fcc3();
1504         break;
1505     }
1506 }
1507
1508 static inline void gen_op_fcmpq(int fccno)
1509 {
1510     switch (fccno) {
1511     case 0:
1512         gen_helper_fcmpq();
1513         break;
1514     case 1:
1515         gen_helper_fcmpq_fcc1();
1516         break;
1517     case 2:
1518         gen_helper_fcmpq_fcc2();
1519         break;
1520     case 3:
1521         gen_helper_fcmpq_fcc3();
1522         break;
1523     }
1524 }
1525
1526 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1527 {
1528     switch (fccno) {
1529     case 0:
1530         gen_helper_fcmpes(r_rs1, r_rs2);
1531         break;
1532     case 1:
1533         gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1534         break;
1535     case 2:
1536         gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1537         break;
1538     case 3:
1539         gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1540         break;
1541     }
1542 }
1543
1544 static inline void gen_op_fcmped(int fccno)
1545 {
1546     switch (fccno) {
1547     case 0:
1548         gen_helper_fcmped();
1549         break;
1550     case 1:
1551         gen_helper_fcmped_fcc1();
1552         break;
1553     case 2:
1554         gen_helper_fcmped_fcc2();
1555         break;
1556     case 3:
1557         gen_helper_fcmped_fcc3();
1558         break;
1559     }
1560 }
1561
1562 static inline void gen_op_fcmpeq(int fccno)
1563 {
1564     switch (fccno) {
1565     case 0:
1566         gen_helper_fcmpeq();
1567         break;
1568     case 1:
1569         gen_helper_fcmpeq_fcc1();
1570         break;
1571     case 2:
1572         gen_helper_fcmpeq_fcc2();
1573         break;
1574     case 3:
1575         gen_helper_fcmpeq_fcc3();
1576         break;
1577     }
1578 }
1579
1580 #else
1581
1582 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1583 {
1584     gen_helper_fcmps(r_rs1, r_rs2);
1585 }
1586
1587 static inline void gen_op_fcmpd(int fccno)
1588 {
1589     gen_helper_fcmpd();
1590 }
1591
1592 static inline void gen_op_fcmpq(int fccno)
1593 {
1594     gen_helper_fcmpq();
1595 }
1596
1597 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1598 {
1599     gen_helper_fcmpes(r_rs1, r_rs2);
1600 }
1601
1602 static inline void gen_op_fcmped(int fccno)
1603 {
1604     gen_helper_fcmped();
1605 }
1606
1607 static inline void gen_op_fcmpeq(int fccno)
1608 {
1609     gen_helper_fcmpeq();
1610 }
1611 #endif
1612
1613 static inline void gen_op_fpexception_im(int fsr_flags)
1614 {
1615     TCGv_i32 r_const;
1616
1617     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1618     tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1619     r_const = tcg_const_i32(TT_FP_EXCP);
1620     gen_helper_raise_exception(r_const);
1621     tcg_temp_free_i32(r_const);
1622 }
1623
1624 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1625 {
1626 #if !defined(CONFIG_USER_ONLY)
1627     if (!dc->fpu_enabled) {
1628         TCGv_i32 r_const;
1629
1630         save_state(dc, r_cond);
1631         r_const = tcg_const_i32(TT_NFPU_INSN);
1632         gen_helper_raise_exception(r_const);
1633         tcg_temp_free_i32(r_const);
1634         dc->is_br = 1;
1635         return 1;
1636     }
1637 #endif
1638     return 0;
1639 }
1640
1641 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1642 {
1643     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1644 }
1645
1646 static inline void gen_clear_float_exceptions(void)
1647 {
1648     gen_helper_clear_float_exceptions();
1649 }
1650
1651 /* asi moves */
1652 #ifdef TARGET_SPARC64
1653 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1654 {
1655     int asi;
1656     TCGv_i32 r_asi;
1657
1658     if (IS_IMM) {
1659         r_asi = tcg_temp_new_i32();
1660         tcg_gen_mov_i32(r_asi, cpu_asi);
1661     } else {
1662         asi = GET_FIELD(insn, 19, 26);
1663         r_asi = tcg_const_i32(asi);
1664     }
1665     return r_asi;
1666 }
1667
1668 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1669                               int sign)
1670 {
1671     TCGv_i32 r_asi, r_size, r_sign;
1672
1673     r_asi = gen_get_asi(insn, addr);
1674     r_size = tcg_const_i32(size);
1675     r_sign = tcg_const_i32(sign);
1676     gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1677     tcg_temp_free_i32(r_sign);
1678     tcg_temp_free_i32(r_size);
1679     tcg_temp_free_i32(r_asi);
1680 }
1681
1682 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1683 {
1684     TCGv_i32 r_asi, r_size;
1685
1686     r_asi = gen_get_asi(insn, addr);
1687     r_size = tcg_const_i32(size);
1688     gen_helper_st_asi(addr, src, r_asi, r_size);
1689     tcg_temp_free_i32(r_size);
1690     tcg_temp_free_i32(r_asi);
1691 }
1692
1693 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1694 {
1695     TCGv_i32 r_asi, r_size, r_rd;
1696
1697     r_asi = gen_get_asi(insn, addr);
1698     r_size = tcg_const_i32(size);
1699     r_rd = tcg_const_i32(rd);
1700     gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1701     tcg_temp_free_i32(r_rd);
1702     tcg_temp_free_i32(r_size);
1703     tcg_temp_free_i32(r_asi);
1704 }
1705
1706 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1707 {
1708     TCGv_i32 r_asi, r_size, r_rd;
1709
1710     r_asi = gen_get_asi(insn, addr);
1711     r_size = tcg_const_i32(size);
1712     r_rd = tcg_const_i32(rd);
1713     gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1714     tcg_temp_free_i32(r_rd);
1715     tcg_temp_free_i32(r_size);
1716     tcg_temp_free_i32(r_asi);
1717 }
1718
1719 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1720 {
1721     TCGv_i32 r_asi, r_size, r_sign;
1722
1723     r_asi = gen_get_asi(insn, addr);
1724     r_size = tcg_const_i32(4);
1725     r_sign = tcg_const_i32(0);
1726     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1727     tcg_temp_free_i32(r_sign);
1728     gen_helper_st_asi(addr, dst, r_asi, r_size);
1729     tcg_temp_free_i32(r_size);
1730     tcg_temp_free_i32(r_asi);
1731     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1732 }
1733
1734 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1735 {
1736     TCGv_i32 r_asi, r_rd;
1737
1738     r_asi = gen_get_asi(insn, addr);
1739     r_rd = tcg_const_i32(rd);
1740     gen_helper_ldda_asi(addr, r_asi, r_rd);
1741     tcg_temp_free_i32(r_rd);
1742     tcg_temp_free_i32(r_asi);
1743 }
1744
1745 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1746 {
1747     TCGv_i32 r_asi, r_size;
1748
1749     gen_movl_reg_TN(rd + 1, cpu_tmp0);
1750     tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1751     r_asi = gen_get_asi(insn, addr);
1752     r_size = tcg_const_i32(8);
1753     gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1754     tcg_temp_free_i32(r_size);
1755     tcg_temp_free_i32(r_asi);
1756 }
1757
1758 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1759                                int rd)
1760 {
1761     TCGv r_val1;
1762     TCGv_i32 r_asi;
1763
1764     r_val1 = tcg_temp_new();
1765     gen_movl_reg_TN(rd, r_val1);
1766     r_asi = gen_get_asi(insn, addr);
1767     gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1768     tcg_temp_free_i32(r_asi);
1769     tcg_temp_free(r_val1);
1770 }
1771
1772 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1773                                 int rd)
1774 {
1775     TCGv_i32 r_asi;
1776
1777     gen_movl_reg_TN(rd, cpu_tmp64);
1778     r_asi = gen_get_asi(insn, addr);
1779     gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1780     tcg_temp_free_i32(r_asi);
1781 }
1782
1783 #elif !defined(CONFIG_USER_ONLY)
1784
1785 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1786                               int sign)
1787 {
1788     TCGv_i32 r_asi, r_size, r_sign;
1789
1790     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1791     r_size = tcg_const_i32(size);
1792     r_sign = tcg_const_i32(sign);
1793     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1794     tcg_temp_free(r_sign);
1795     tcg_temp_free(r_size);
1796     tcg_temp_free(r_asi);
1797     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1798 }
1799
1800 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1801 {
1802     TCGv_i32 r_asi, r_size;
1803
1804     tcg_gen_extu_tl_i64(cpu_tmp64, src);
1805     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1806     r_size = tcg_const_i32(size);
1807     gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1808     tcg_temp_free(r_size);
1809     tcg_temp_free(r_asi);
1810 }
1811
1812 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1813 {
1814     TCGv_i32 r_asi, r_size, r_sign;
1815     TCGv_i64 r_val;
1816
1817     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1818     r_size = tcg_const_i32(4);
1819     r_sign = tcg_const_i32(0);
1820     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1821     tcg_temp_free(r_sign);
1822     r_val = tcg_temp_new_i64();
1823     tcg_gen_extu_tl_i64(r_val, dst);
1824     gen_helper_st_asi(addr, r_val, r_asi, r_size);
1825     tcg_temp_free_i64(r_val);
1826     tcg_temp_free(r_size);
1827     tcg_temp_free(r_asi);
1828     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1829 }
1830
1831 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1832 {
1833     TCGv_i32 r_asi, r_size, r_sign;
1834
1835     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1836     r_size = tcg_const_i32(8);
1837     r_sign = tcg_const_i32(0);
1838     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1839     tcg_temp_free(r_sign);
1840     tcg_temp_free(r_size);
1841     tcg_temp_free(r_asi);
1842     tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1843     gen_movl_TN_reg(rd + 1, cpu_tmp0);
1844     tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1845     tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1846     gen_movl_TN_reg(rd, hi);
1847 }
1848
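     /* STDA: store the rd/rd+1 pair as a single 64-bit value, rd supplying
        the upper word.  */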
1849 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1850 {
1851     TCGv_i32 r_asi, r_size;
1852
1853     gen_movl_reg_TN(rd + 1, cpu_tmp0);
1854     tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1855     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1856     r_size = tcg_const_i32(8);
1857     gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1858     tcg_temp_free(r_size);
1859     tcg_temp_free(r_asi);
1860 }
1861 #endif
1862
1863 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
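     /* LDSTUB: load the byte at [addr] into dst, then overwrite that byte
        with 0xff.  */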
1864 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1865 {
1866     TCGv_i64 r_val;
1867     TCGv_i32 r_asi, r_size;
1868
1869     gen_ld_asi(dst, addr, insn, 1, 0);
1870
1871     r_val = tcg_const_i64(0xffULL);
1872     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1873     r_size = tcg_const_i32(1);
1874     gen_helper_st_asi(addr, r_val, r_asi, r_size);
1875     tcg_temp_free_i32(r_size);
1876     tcg_temp_free_i32(r_asi);
1877     tcg_temp_free_i64(r_val);
1878 }
1879 #endif
1880
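     /* Return a TCGv holding rs1: %g0 becomes a fresh constant zero, %g1-%g7
        come straight from cpu_gregs[], and windowed registers are loaded
        through cpu_regwptr into the caller-provided temporary (e.g. rs1 == 9,
        %o1, is read from slot 1 of the current window).  */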
1881 static inline TCGv get_src1(unsigned int insn, TCGv def)
1882 {
1883     TCGv r_rs1 = def;
1884     unsigned int rs1;
1885
1886     rs1 = GET_FIELD(insn, 13, 17);
1887     if (rs1 == 0)
1888         r_rs1 = tcg_const_tl(0); // XXX how to free?
1889     else if (rs1 < 8)
1890         r_rs1 = cpu_gregs[rs1];
1891     else
1892         tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1893     return r_rs1;
1894 }
1895
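     /* Return a TCGv holding the second operand: the sign-extended 13-bit
        immediate when the i bit is set, otherwise rs2 resolved the same way
        as rs1 above.  */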
1896 static inline TCGv get_src2(unsigned int insn, TCGv def)
1897 {
1898     TCGv r_rs2 = def;
1899
1900     if (IS_IMM) { /* immediate */
1901         target_long simm;
1902
1903         simm = GET_FIELDs(insn, 19, 31);
1904         r_rs2 = tcg_const_tl(simm); // XXX how to free?
1905     } else { /* register */
1906         unsigned int rs2;
1907
1908         rs2 = GET_FIELD(insn, 27, 31);
1909         if (rs2 == 0)
1910             r_rs2 = tcg_const_tl(0); // XXX how to free?
1911         else if (rs2 < 8)
1912             r_rs2 = cpu_gregs[rs2];
1913         else
1914             tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1915     }
1916     return r_rs2;
1917 }
1918
1919 #define CHECK_IU_FEATURE(dc, FEATURE)                      \
1920     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
1921         goto illegal_insn;
1922 #define CHECK_FPU_FEATURE(dc, FEATURE)                     \
1923     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
1924         goto nfpu_insn;
1925
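     /* Translate a single instruction.  The top two bits (op) pick the major
        format: 0 for branches and SETHI, 1 for CALL, and 2 or 3 for the
        remaining instructions, which are further decoded by op3/opf.  */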
1926 /* before an instruction, dc->pc must be static */
1927 static void disas_sparc_insn(DisasContext * dc)
1928 {
1929     unsigned int insn, opc, rs1, rs2, rd;
1930     target_long simm;
1931
1932     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1933         tcg_gen_debug_insn_start(dc->pc);
1934     insn = ldl_code(dc->pc);
1935     opc = GET_FIELD(insn, 0, 1);
1936
1937     rd = GET_FIELD(insn, 2, 6);
1938
1939     cpu_src1 = tcg_temp_new(); // const
1940     cpu_src2 = tcg_temp_new(); // const
1941
1942     switch (opc) {
1943     case 0:                     /* branches/sethi */
1944         {
1945             unsigned int xop = GET_FIELD(insn, 7, 9);
1946             int32_t target;
1947             switch (xop) {
1948 #ifdef TARGET_SPARC64
1949             case 0x1:           /* V9 BPcc */
1950                 {
1951                     int cc;
1952
1953                     target = GET_FIELD_SP(insn, 0, 18);
1954                     target = sign_extend(target, 19);
1955                     target <<= 2;
1956                     cc = GET_FIELD_SP(insn, 20, 21);
1957                     if (cc == 0)
1958                         do_branch(dc, target, insn, 0, cpu_cond);
1959                     else if (cc == 2)
1960                         do_branch(dc, target, insn, 1, cpu_cond);
1961                     else
1962                         goto illegal_insn;
1963                     goto jmp_insn;
1964                 }
1965             case 0x3:           /* V9 BPr */
1966                 {
1967                     target = GET_FIELD_SP(insn, 0, 13) |
1968                         (GET_FIELD_SP(insn, 20, 21) << 14);
1969                     target = sign_extend(target, 16);
1970                     target <<= 2;
1971                     cpu_src1 = get_src1(insn, cpu_src1);
1972                     do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1973                     goto jmp_insn;
1974                 }
1975             case 0x5:           /* V9 FBPcc */
1976                 {
1977                     int cc = GET_FIELD_SP(insn, 20, 21);
1978                     if (gen_trap_ifnofpu(dc, cpu_cond))
1979                         goto jmp_insn;
1980                     target = GET_FIELD_SP(insn, 0, 18);
1981                     target = sign_extend(target, 19);
1982                     target <<= 2;
1983                     do_fbranch(dc, target, insn, cc, cpu_cond);
1984                     goto jmp_insn;
1985                 }
1986 #else
1987             case 0x7:           /* CBN+x */
1988                 {
1989                     goto ncp_insn;
1990                 }
1991 #endif
1992             case 0x2:           /* BN+x */
1993                 {
1994                     target = GET_FIELD(insn, 10, 31);
1995                     target = sign_extend(target, 22);
1996                     target <<= 2;
1997                     do_branch(dc, target, insn, 0, cpu_cond);
1998                     goto jmp_insn;
1999                 }
2000             case 0x6:           /* FBN+x */
2001                 {
2002                     if (gen_trap_ifnofpu(dc, cpu_cond))
2003                         goto jmp_insn;
2004                     target = GET_FIELD(insn, 10, 31);
2005                     target = sign_extend(target, 22);
2006                     target <<= 2;
2007                     do_fbranch(dc, target, insn, 0, cpu_cond);
2008                     goto jmp_insn;
2009                 }
2010             case 0x4:           /* SETHI */
2011                 if (rd) { /* SETHI with rd == %g0 is a nop */
2012                     uint32_t value = GET_FIELD(insn, 10, 31);
2013                     TCGv r_const;
2014
2015                     r_const = tcg_const_tl(value << 10);
2016                     gen_movl_TN_reg(rd, r_const);
2017                     tcg_temp_free(r_const);
2018                 }
2019                 break;
2020             case 0x0:           /* UNIMPL */
2021             default:
2022                 goto illegal_insn;
2023             }
2024             break;
2025         }
2026         break;
2027     case 1:                     /*CALL*/
2028         {
2029             target_long target = GET_FIELDs(insn, 2, 31) << 2;
2030             TCGv r_const;
2031
2032             r_const = tcg_const_tl(dc->pc);
2033             gen_movl_TN_reg(15, r_const);
2034             tcg_temp_free(r_const);
2035             target += dc->pc;
2036             gen_mov_pc_npc(dc, cpu_cond);
2037             dc->npc = target;
2038         }
2039         goto jmp_insn;
2040     case 2:                     /* FPU & Logical Operations */
2041         {
2042             unsigned int xop = GET_FIELD(insn, 7, 12);
2043             if (xop == 0x3a) {  /* generate trap */
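                     /* Ticc: the trap number is rs1 plus either a 7-bit
                        immediate or rs2; it is masked, offset by TT_TRAP and
                        raised only when the condition evaluates true.  */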
2044                 int cond;
2045
2046                 cpu_src1 = get_src1(insn, cpu_src1);
2047                 if (IS_IMM) {
2048                     rs2 = GET_FIELD(insn, 25, 31);
2049                     tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2050                 } else {
2051                     rs2 = GET_FIELD(insn, 27, 31);
2052                     if (rs2 != 0) {
2053                         gen_movl_reg_TN(rs2, cpu_src2);
2054                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2055                     } else
2056                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
2057                 }
2058                 cond = GET_FIELD(insn, 3, 6);
2059                 if (cond == 0x8) {
2060                     save_state(dc, cpu_cond);
2061                     if ((dc->def->features & CPU_FEATURE_HYPV) &&
2062                         supervisor(dc))
2063                         tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2064                     else
2065                         tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2066                     tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2067                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2068                     gen_helper_raise_exception(cpu_tmp32);
2069                 } else if (cond != 0) {
2070                     TCGv r_cond = tcg_temp_new();
2071                     int l1;
2072 #ifdef TARGET_SPARC64
2073                     /* V9 icc/xcc */
2074                     int cc = GET_FIELD_SP(insn, 11, 12);
2075
2076                     save_state(dc, cpu_cond);
2077                     if (cc == 0)
2078                         gen_cond(r_cond, 0, cond, dc);
2079                     else if (cc == 2)
2080                         gen_cond(r_cond, 1, cond, dc);
2081                     else
2082                         goto illegal_insn;
2083 #else
2084                     save_state(dc, cpu_cond);
2085                     gen_cond(r_cond, 0, cond, dc);
2086 #endif
2087                     l1 = gen_new_label();
2088                     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2089
2090                     if ((dc->def->features & CPU_FEATURE_HYPV) &&
2091                         supervisor(dc))
2092                         tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2093                     else
2094                         tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2095                     tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2096                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2097                     gen_helper_raise_exception(cpu_tmp32);
2098
2099                     gen_set_label(l1);
2100                     tcg_temp_free(r_cond);
2101                 }
2102                 gen_op_next_insn();
2103                 tcg_gen_exit_tb(0);
2104                 dc->is_br = 1;
2105                 goto jmp_insn;
2106             } else if (xop == 0x28) {
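                 /* RDY / RDASR: rs1 selects the ancillary state register;
                    0 reads %y, the higher encodings are V9 or implementation
                    specific.  */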
2107                 rs1 = GET_FIELD(insn, 13, 17);
2108                 switch(rs1) {
2109                 case 0: /* rdy */
2110 #ifndef TARGET_SPARC64
2111                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2112                                        manual, rdy on the microSPARC
2113                                        II */
2114                 case 0x0f:          /* stbar in the SPARCv8 manual,
2115                                        rdy on the microSPARC II */
2116                 case 0x10 ... 0x1f: /* implementation-dependent in the
2117                                        SPARCv8 manual, rdy on the
2118                                        microSPARC II */
2119 #endif
2120                     gen_movl_TN_reg(rd, cpu_y);
2121                     break;
2122 #ifdef TARGET_SPARC64
2123                 case 0x2: /* V9 rdccr */
2124                     gen_helper_compute_psr();
2125                     gen_helper_rdccr(cpu_dst);
2126                     gen_movl_TN_reg(rd, cpu_dst);
2127                     break;
2128                 case 0x3: /* V9 rdasi */
2129                     tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2130                     gen_movl_TN_reg(rd, cpu_dst);
2131                     break;
2132                 case 0x4: /* V9 rdtick */
2133                     {
2134                         TCGv_ptr r_tickptr;
2135
2136                         r_tickptr = tcg_temp_new_ptr();
2137                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2138                                        offsetof(CPUState, tick));
2139                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2140                         tcg_temp_free_ptr(r_tickptr);
2141                         gen_movl_TN_reg(rd, cpu_dst);
2142                     }
2143                     break;
2144                 case 0x5: /* V9 rdpc */
2145                     {
2146                         TCGv r_const;
2147
2148                         r_const = tcg_const_tl(dc->pc);
2149                         gen_movl_TN_reg(rd, r_const);
2150                         tcg_temp_free(r_const);
2151                     }
2152                     break;
2153                 case 0x6: /* V9 rdfprs */
2154                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2155                     gen_movl_TN_reg(rd, cpu_dst);
2156                     break;
2157                 case 0xf: /* V9 membar */
2158                     break; /* no effect */
2159                 case 0x13: /* Graphics Status */
2160                     if (gen_trap_ifnofpu(dc, cpu_cond))
2161                         goto jmp_insn;
2162                     gen_movl_TN_reg(rd, cpu_gsr);
2163                     break;
2164                 case 0x16: /* Softint */
2165                     tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2166                     gen_movl_TN_reg(rd, cpu_dst);
2167                     break;
2168                 case 0x17: /* Tick compare */
2169                     gen_movl_TN_reg(rd, cpu_tick_cmpr);
2170                     break;
2171                 case 0x18: /* System tick */
2172                     {
2173                         TCGv_ptr r_tickptr;
2174
2175                         r_tickptr = tcg_temp_new_ptr();
2176                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2177                                        offsetof(CPUState, stick));
2178                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2179                         tcg_temp_free_ptr(r_tickptr);
2180                         gen_movl_TN_reg(rd, cpu_dst);
2181                     }
2182                     break;
2183                 case 0x19: /* System tick compare */
2184                     gen_movl_TN_reg(rd, cpu_stick_cmpr);
2185                     break;
2186                 case 0x10: /* Performance Control */
2187                 case 0x11: /* Performance Instrumentation Counter */
2188                 case 0x12: /* Dispatch Control */
2189                 case 0x14: /* Softint set, WO */
2190                 case 0x15: /* Softint clear, WO */
2191 #endif
2192                 default:
2193                     goto illegal_insn;
2194                 }
2195 #if !defined(CONFIG_USER_ONLY)
2196             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2197 #ifndef TARGET_SPARC64
2198                 if (!supervisor(dc))
2199                     goto priv_insn;
2200                 gen_helper_compute_psr();
2201                 dc->cc_op = CC_OP_FLAGS;
2202                 gen_helper_rdpsr(cpu_dst);
2203 #else
2204                 CHECK_IU_FEATURE(dc, HYPV);
2205                 if (!hypervisor(dc))
2206                     goto priv_insn;
2207                 rs1 = GET_FIELD(insn, 13, 17);
2208                 switch (rs1) {
2209                 case 0: // hpstate
2210                     // gen_op_rdhpstate();
2211                     break;
2212                 case 1: // htstate
2213                     // gen_op_rdhtstate();
2214                     break;
2215                 case 3: // hintp
2216                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2217                     break;
2218                 case 5: // htba
2219                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
2220                     break;
2221                 case 6: // hver
2222                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
2223                     break;
2224                 case 31: // hstick_cmpr
2225                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2226                     break;
2227                 default:
2228                     goto illegal_insn;
2229                 }
2230 #endif
2231                 gen_movl_TN_reg(rd, cpu_dst);
2232                 break;
2233             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2234                 if (!supervisor(dc))
2235                     goto priv_insn;
2236 #ifdef TARGET_SPARC64
2237                 rs1 = GET_FIELD(insn, 13, 17);
2238                 switch (rs1) {
2239                 case 0: // tpc
2240                     {
2241                         TCGv_ptr r_tsptr;
2242
2243                         r_tsptr = tcg_temp_new_ptr();
2244                         tcg_gen_ld_ptr(r_tsptr, cpu_env,
2245                                        offsetof(CPUState, tsptr));
2246                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2247                                       offsetof(trap_state, tpc));
2248                         tcg_temp_free_ptr(r_tsptr);
2249                     }
2250                     break;
2251                 case 1: // tnpc
2252                     {
2253                         TCGv_ptr r_tsptr;
2254
2255                         r_tsptr = tcg_temp_new_ptr();
2256                         tcg_gen_ld_ptr(r_tsptr, cpu_env,
2257                                        offsetof(CPUState, tsptr));
2258                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2259                                       offsetof(trap_state, tnpc));
2260                         tcg_temp_free_ptr(r_tsptr);
2261                     }
2262                     break;
2263                 case 2: // tstate
2264                     {
2265                         TCGv_ptr r_tsptr;
2266
2267                         r_tsptr = tcg_temp_new_ptr();
2268                         tcg_gen_ld_ptr(r_tsptr, cpu_env,
2269                                        offsetof(CPUState, tsptr));
2270                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2271                                       offsetof(trap_state, tstate));
2272                         tcg_temp_free_ptr(r_tsptr);
2273                     }
2274                     break;
2275                 case 3: // tt
2276                     {
2277                         TCGv_ptr r_tsptr;
2278
2279                         r_tsptr = tcg_temp_new_ptr();
2280                         tcg_gen_ld_ptr(r_tsptr, cpu_env,
2281                                        offsetof(CPUState, tsptr));
2282                         tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2283                                        offsetof(trap_state, tt));
2284                         tcg_temp_free_ptr(r_tsptr);
2285                         tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2286                     }
2287                     break;
2288                 case 4: // tick
2289                     {
2290                         TCGv_ptr r_tickptr;
2291
2292                         r_tickptr = tcg_temp_new_ptr();
2293                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2294                                        offsetof(CPUState, tick));
2295                         gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2296                         gen_movl_TN_reg(rd, cpu_tmp0);
2297                         tcg_temp_free_ptr(r_tickptr);
2298                     }
2299                     break;
2300                 case 5: // tba
2301                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2302                     break;
2303                 case 6: // pstate
2304                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2305                                    offsetof(CPUSPARCState, pstate));
2306                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2307                     break;
2308                 case 7: // tl
2309                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2310                                    offsetof(CPUSPARCState, tl));
2311                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2312                     break;
2313                 case 8: // pil
2314                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2315                                    offsetof(CPUSPARCState, psrpil));
2316                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2317                     break;
2318                 case 9: // cwp
2319                     gen_helper_rdcwp(cpu_tmp0);
2320                     break;
2321                 case 10: // cansave
2322                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2323                                    offsetof(CPUSPARCState, cansave));
2324                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2325                     break;
2326                 case 11: // canrestore
2327                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2328                                    offsetof(CPUSPARCState, canrestore));
2329                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2330                     break;
2331                 case 12: // cleanwin
2332                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2333                                    offsetof(CPUSPARCState, cleanwin));
2334                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2335                     break;
2336                 case 13: // otherwin
2337                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2338                                    offsetof(CPUSPARCState, otherwin));
2339                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2340                     break;
2341                 case 14: // wstate
2342                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2343                                    offsetof(CPUSPARCState, wstate));
2344                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2345                     break;
2346                 case 16: // UA2005 gl
2347                     CHECK_IU_FEATURE(dc, GL);
2348                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2349                                    offsetof(CPUSPARCState, gl));
2350                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2351                     break;
2352                 case 26: // UA2005 strand status
2353                     CHECK_IU_FEATURE(dc, HYPV);
2354                     if (!hypervisor(dc))
2355                         goto priv_insn;
2356                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2357                     break;
2358                 case 31: // ver
2359                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2360                     break;
2361                 case 15: // fq
2362                 default:
2363                     goto illegal_insn;
2364                 }
2365 #else
2366                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2367 #endif
2368                 gen_movl_TN_reg(rd, cpu_tmp0);
2369                 break;
2370             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2371 #ifdef TARGET_SPARC64
2372                 save_state(dc, cpu_cond);
2373                 gen_helper_flushw();
2374 #else
2375                 if (!supervisor(dc))
2376                     goto priv_insn;
2377                 gen_movl_TN_reg(rd, cpu_tbr);
2378 #endif
2379                 break;
2380 #endif
2381             } else if (xop == 0x34) {   /* FPU Operations */
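                 /* FPop1: single-precision operands live directly in cpu_fpr[];
                    double and quad operands are staged through the DT0/DT1 and
                    QT0/QT1 load/store helpers around each operation.  */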
2382                 if (gen_trap_ifnofpu(dc, cpu_cond))
2383                     goto jmp_insn;
2384                 gen_op_clear_ieee_excp_and_FTT();
2385                 rs1 = GET_FIELD(insn, 13, 17);
2386                 rs2 = GET_FIELD(insn, 27, 31);
2387                 xop = GET_FIELD(insn, 18, 26);
2388                 switch (xop) {
2389                 case 0x1: /* fmovs */
2390                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2391                     break;
2392                 case 0x5: /* fnegs */
2393                     gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2394                     break;
2395                 case 0x9: /* fabss */
2396                     gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2397                     break;
2398                 case 0x29: /* fsqrts */
2399                     CHECK_FPU_FEATURE(dc, FSQRT);
2400                     gen_clear_float_exceptions();
2401                     gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2402                     gen_helper_check_ieee_exceptions();
2403                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2404                     break;
2405                 case 0x2a: /* fsqrtd */
2406                     CHECK_FPU_FEATURE(dc, FSQRT);
2407                     gen_op_load_fpr_DT1(DFPREG(rs2));
2408                     gen_clear_float_exceptions();
2409                     gen_helper_fsqrtd();
2410                     gen_helper_check_ieee_exceptions();
2411                     gen_op_store_DT0_fpr(DFPREG(rd));
2412                     break;
2413                 case 0x2b: /* fsqrtq */
2414                     CHECK_FPU_FEATURE(dc, FLOAT128);
2415                     gen_op_load_fpr_QT1(QFPREG(rs2));
2416                     gen_clear_float_exceptions();
2417                     gen_helper_fsqrtq();
2418                     gen_helper_check_ieee_exceptions();
2419                     gen_op_store_QT0_fpr(QFPREG(rd));
2420                     break;
2421                 case 0x41: /* fadds */
2422                     gen_clear_float_exceptions();
2423                     gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2424                     gen_helper_check_ieee_exceptions();
2425                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2426                     break;
2427                 case 0x42: /* faddd */
2428                     gen_op_load_fpr_DT0(DFPREG(rs1));
2429                     gen_op_load_fpr_DT1(DFPREG(rs2));
2430                     gen_clear_float_exceptions();
2431                     gen_helper_faddd();
2432                     gen_helper_check_ieee_exceptions();
2433                     gen_op_store_DT0_fpr(DFPREG(rd));
2434                     break;
2435                 case 0x43: /* faddq */
2436                     CHECK_FPU_FEATURE(dc, FLOAT128);
2437                     gen_op_load_fpr_QT0(QFPREG(rs1));
2438                     gen_op_load_fpr_QT1(QFPREG(rs2));
2439                     gen_clear_float_exceptions();
2440                     gen_helper_faddq();
2441                     gen_helper_check_ieee_exceptions();
2442                     gen_op_store_QT0_fpr(QFPREG(rd));
2443                     break;
2444                 case 0x45: /* fsubs */
2445                     gen_clear_float_exceptions();
2446                     gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2447                     gen_helper_check_ieee_exceptions();
2448                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2449                     break;
2450                 case 0x46: /* fsubd */
2451                     gen_op_load_fpr_DT0(DFPREG(rs1));
2452                     gen_op_load_fpr_DT1(DFPREG(rs2));
2453                     gen_clear_float_exceptions();
2454                     gen_helper_fsubd();
2455                     gen_helper_check_ieee_exceptions();
2456                     gen_op_store_DT0_fpr(DFPREG(rd));
2457                     break;
2458                 case 0x47: /* fsubq */
2459                     CHECK_FPU_FEATURE(dc, FLOAT128);
2460                     gen_op_load_fpr_QT0(QFPREG(rs1));
2461                     gen_op_load_fpr_QT1(QFPREG(rs2));
2462                     gen_clear_float_exceptions();
2463                     gen_helper_fsubq();
2464                     gen_helper_check_ieee_exceptions();
2465                     gen_op_store_QT0_fpr(QFPREG(rd));
2466                     break;
2467                 case 0x49: /* fmuls */
2468                     CHECK_FPU_FEATURE(dc, FMUL);
2469                     gen_clear_float_exceptions();
2470                     gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2471                     gen_helper_check_ieee_exceptions();
2472                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2473                     break;
2474                 case 0x4a: /* fmuld */
2475                     CHECK_FPU_FEATURE(dc, FMUL);
2476                     gen_op_load_fpr_DT0(DFPREG(rs1));
2477                     gen_op_load_fpr_DT1(DFPREG(rs2));
2478                     gen_clear_float_exceptions();
2479                     gen_helper_fmuld();
2480                     gen_helper_check_ieee_exceptions();
2481                     gen_op_store_DT0_fpr(DFPREG(rd));
2482                     break;
2483                 case 0x4b: /* fmulq */
2484                     CHECK_FPU_FEATURE(dc, FLOAT128);
2485                     CHECK_FPU_FEATURE(dc, FMUL);
2486                     gen_op_load_fpr_QT0(QFPREG(rs1));
2487                     gen_op_load_fpr_QT1(QFPREG(rs2));
2488                     gen_clear_float_exceptions();
2489                     gen_helper_fmulq();
2490                     gen_helper_check_ieee_exceptions();
2491                     gen_op_store_QT0_fpr(QFPREG(rd));
2492                     break;
2493                 case 0x4d: /* fdivs */
2494                     gen_clear_float_exceptions();
2495                     gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2496                     gen_helper_check_ieee_exceptions();
2497                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2498                     break;
2499                 case 0x4e: /* fdivd */
2500                     gen_op_load_fpr_DT0(DFPREG(rs1));
2501                     gen_op_load_fpr_DT1(DFPREG(rs2));
2502                     gen_clear_float_exceptions();
2503                     gen_helper_fdivd();
2504                     gen_helper_check_ieee_exceptions();
2505                     gen_op_store_DT0_fpr(DFPREG(rd));
2506                     break;
2507                 case 0x4f: /* fdivq */
2508                     CHECK_FPU_FEATURE(dc, FLOAT128);
2509                     gen_op_load_fpr_QT0(QFPREG(rs1));
2510                     gen_op_load_fpr_QT1(QFPREG(rs2));
2511                     gen_clear_float_exceptions();
2512                     gen_helper_fdivq();
2513                     gen_helper_check_ieee_exceptions();
2514                     gen_op_store_QT0_fpr(QFPREG(rd));
2515                     break;
2516                 case 0x69: /* fsmuld */
2517                     CHECK_FPU_FEATURE(dc, FSMULD);
2518                     gen_clear_float_exceptions();
2519                     gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2520                     gen_helper_check_ieee_exceptions();
2521                     gen_op_store_DT0_fpr(DFPREG(rd));
2522                     break;
2523                 case 0x6e: /* fdmulq */
2524                     CHECK_FPU_FEATURE(dc, FLOAT128);
2525                     gen_op_load_fpr_DT0(DFPREG(rs1));
2526                     gen_op_load_fpr_DT1(DFPREG(rs2));
2527                     gen_clear_float_exceptions();
2528                     gen_helper_fdmulq();
2529                     gen_helper_check_ieee_exceptions();
2530                     gen_op_store_QT0_fpr(QFPREG(rd));
2531                     break;
2532                 case 0xc4: /* fitos */
2533                     gen_clear_float_exceptions();
2534                     gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2535                     gen_helper_check_ieee_exceptions();
2536                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2537                     break;
2538                 case 0xc6: /* fdtos */
2539                     gen_op_load_fpr_DT1(DFPREG(rs2));
2540                     gen_clear_float_exceptions();
2541                     gen_helper_fdtos(cpu_tmp32);
2542                     gen_helper_check_ieee_exceptions();
2543                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2544                     break;
2545                 case 0xc7: /* fqtos */
2546                     CHECK_FPU_FEATURE(dc, FLOAT128);
2547                     gen_op_load_fpr_QT1(QFPREG(rs2));
2548                     gen_clear_float_exceptions();
2549                     gen_helper_fqtos(cpu_tmp32);
2550                     gen_helper_check_ieee_exceptions();
2551                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2552                     break;
2553                 case 0xc8: /* fitod */
2554                     gen_helper_fitod(cpu_fpr[rs2]);
2555                     gen_op_store_DT0_fpr(DFPREG(rd));
2556                     break;
2557                 case 0xc9: /* fstod */
2558                     gen_helper_fstod(cpu_fpr[rs2]);
2559                     gen_op_store_DT0_fpr(DFPREG(rd));
2560                     break;
2561                 case 0xcb: /* fqtod */
2562                     CHECK_FPU_FEATURE(dc, FLOAT128);
2563                     gen_op_load_fpr_QT1(QFPREG(rs2));
2564                     gen_clear_float_exceptions();
2565                     gen_helper_fqtod();
2566                     gen_helper_check_ieee_exceptions();
2567                     gen_op_store_DT0_fpr(DFPREG(rd));
2568                     break;
2569                 case 0xcc: /* fitoq */
2570                     CHECK_FPU_FEATURE(dc, FLOAT128);
2571                     gen_helper_fitoq(cpu_fpr[rs2]);
2572                     gen_op_store_QT0_fpr(QFPREG(rd));
2573                     break;
2574                 case 0xcd: /* fstoq */
2575                     CHECK_FPU_FEATURE(dc, FLOAT128);
2576                     gen_helper_fstoq(cpu_fpr[rs2]);
2577                     gen_op_store_QT0_fpr(QFPREG(rd));
2578                     break;
2579                 case 0xce: /* fdtoq */
2580                     CHECK_FPU_FEATURE(dc, FLOAT128);
2581                     gen_op_load_fpr_DT1(DFPREG(rs2));
2582                     gen_helper_fdtoq();
2583                     gen_op_store_QT0_fpr(QFPREG(rd));
2584                     break;
2585                 case 0xd1: /* fstoi */
2586                     gen_clear_float_exceptions();
2587                     gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2588                     gen_helper_check_ieee_exceptions();
2589                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2590                     break;
2591                 case 0xd2: /* fdtoi */
2592                     gen_op_load_fpr_DT1(DFPREG(rs2));
2593                     gen_clear_float_exceptions();
2594                     gen_helper_fdtoi(cpu_tmp32);
2595                     gen_helper_check_ieee_exceptions();
2596                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2597                     break;
2598                 case 0xd3: /* fqtoi */
2599                     CHECK_FPU_FEATURE(dc, FLOAT128);
2600                     gen_op_load_fpr_QT1(QFPREG(rs2));
2601                     gen_clear_float_exceptions();
2602                     gen_helper_fqtoi(cpu_tmp32);
2603                     gen_helper_check_ieee_exceptions();
2604                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2605                     break;
2606 #ifdef TARGET_SPARC64
2607                 case 0x2: /* V9 fmovd */
2608                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2609                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2610                                     cpu_fpr[DFPREG(rs2) + 1]);
2611                     break;
2612                 case 0x3: /* V9 fmovq */
2613                     CHECK_FPU_FEATURE(dc, FLOAT128);
2614                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2615                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2616                                     cpu_fpr[QFPREG(rs2) + 1]);
2617                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2618                                     cpu_fpr[QFPREG(rs2) + 2]);
2619                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2620                                     cpu_fpr[QFPREG(rs2) + 3]);
2621                     break;
2622                 case 0x6: /* V9 fnegd */
2623                     gen_op_load_fpr_DT1(DFPREG(rs2));
2624                     gen_helper_fnegd();
2625                     gen_op_store_DT0_fpr(DFPREG(rd));
2626                     break;
2627                 case 0x7: /* V9 fnegq */
2628                     CHECK_FPU_FEATURE(dc, FLOAT128);
2629                     gen_op_load_fpr_QT1(QFPREG(rs2));
2630                     gen_helper_fnegq();
2631                     gen_op_store_QT0_fpr(QFPREG(rd));
2632                     break;
2633                 case 0xa: /* V9 fabsd */
2634                     gen_op_load_fpr_DT1(DFPREG(rs2));
2635                     gen_helper_fabsd();
2636                     gen_op_store_DT0_fpr(DFPREG(rd));
2637                     break;
2638                 case 0xb: /* V9 fabsq */
2639                     CHECK_FPU_FEATURE(dc, FLOAT128);
2640                     gen_op_load_fpr_QT1(QFPREG(rs2));
2641                     gen_helper_fabsq();
2642                     gen_op_store_QT0_fpr(QFPREG(rd));
2643                     break;
2644                 case 0x81: /* V9 fstox */
2645                     gen_clear_float_exceptions();
2646                     gen_helper_fstox(cpu_fpr[rs2]);
2647                     gen_helper_check_ieee_exceptions();
2648                     gen_op_store_DT0_fpr(DFPREG(rd));
2649                     break;
2650                 case 0x82: /* V9 fdtox */
2651                     gen_op_load_fpr_DT1(DFPREG(rs2));
2652                     gen_clear_float_exceptions();
2653                     gen_helper_fdtox();
2654                     gen_helper_check_ieee_exceptions();
2655                     gen_op_store_DT0_fpr(DFPREG(rd));
2656                     break;
2657                 case 0x83: /* V9 fqtox */
2658                     CHECK_FPU_FEATURE(dc, FLOAT128);
2659                     gen_op_load_fpr_QT1(QFPREG(rs2));
2660                     gen_clear_float_exceptions();
2661                     gen_helper_fqtox();
2662                     gen_helper_check_ieee_exceptions();
2663                     gen_op_store_DT0_fpr(DFPREG(rd));
2664                     break;
2665                 case 0x84: /* V9 fxtos */
2666                     gen_op_load_fpr_DT1(DFPREG(rs2));
2667                     gen_clear_float_exceptions();
2668                     gen_helper_fxtos(cpu_tmp32);
2669                     gen_helper_check_ieee_exceptions();
2670                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2671                     break;
2672                 case 0x88: /* V9 fxtod */
2673                     gen_op_load_fpr_DT1(DFPREG(rs2));
2674                     gen_clear_float_exceptions();
2675                     gen_helper_fxtod();
2676                     gen_helper_check_ieee_exceptions();
2677                     gen_op_store_DT0_fpr(DFPREG(rd));
2678                     break;
2679                 case 0x8c: /* V9 fxtoq */
2680                     CHECK_FPU_FEATURE(dc, FLOAT128);
2681                     gen_op_load_fpr_DT1(DFPREG(rs2));
2682                     gen_clear_float_exceptions();
2683                     gen_helper_fxtoq();
2684                     gen_helper_check_ieee_exceptions();
2685                     gen_op_store_QT0_fpr(QFPREG(rd));
2686                     break;
2687 #endif
2688                 default:
2689                     goto illegal_insn;
2690                 }
2691             } else if (xop == 0x35) {   /* FPU Operations */
2692 #ifdef TARGET_SPARC64
2693                 int cond;
2694 #endif
2695                 if (gen_trap_ifnofpu(dc, cpu_cond))
2696                     goto jmp_insn;
2697                 gen_op_clear_ieee_excp_and_FTT();
2698                 rs1 = GET_FIELD(insn, 13, 17);
2699                 rs2 = GET_FIELD(insn, 27, 31);
2700                 xop = GET_FIELD(insn, 18, 26);
2701 #ifdef TARGET_SPARC64
2702                 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2703                     int l1;
2704
2705                     l1 = gen_new_label();
2706                     cond = GET_FIELD_SP(insn, 14, 17);
2707                     cpu_src1 = get_src1(insn, cpu_src1);
2708                     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2709                                        0, l1);
2710                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2711                     gen_set_label(l1);
2712                     break;
2713                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2714                     int l1;
2715
2716                     l1 = gen_new_label();
2717                     cond = GET_FIELD_SP(insn, 14, 17);
2718                     cpu_src1 = get_src1(insn, cpu_src1);
2719                     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2720                                        0, l1);
2721                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2722                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2723                     gen_set_label(l1);
2724                     break;
2725                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2726                     int l1;
2727
2728                     CHECK_FPU_FEATURE(dc, FLOAT128);
2729                     l1 = gen_new_label();
2730                     cond = GET_FIELD_SP(insn, 14, 17);
2731                     cpu_src1 = get_src1(insn, cpu_src1);
2732                     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2733                                        0, l1);
2734                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2735                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2736                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2737                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2738                     gen_set_label(l1);
2739                     break;
2740                 }
2741 #endif
2742                 switch (xop) {
2743 #ifdef TARGET_SPARC64
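     /* FMOVcc on %fccN: evaluate the FP condition and branch past the register
        copy when it is false, so the move is performed only when the condition
        holds.  */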
2744 #define FMOVSCC(fcc)                                                    \
2745                     {                                                   \
2746                         TCGv r_cond;                                    \
2747                         int l1;                                         \
2748                                                                         \
2749                         l1 = gen_new_label();                           \
2750                         r_cond = tcg_temp_new();                        \
2751                         cond = GET_FIELD_SP(insn, 14, 17);              \
2752                         gen_fcond(r_cond, fcc, cond);                   \
2753                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2754                                            0, l1);                      \
2755                         tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
2756                         gen_set_label(l1);                              \
2757                         tcg_temp_free(r_cond);                          \
2758                     }
2759 #define FMOVDCC(fcc)                                                    \
2760                     {                                                   \
2761                         TCGv r_cond;                                    \
2762                         int l1;                                         \
2763                                                                         \
2764                         l1 = gen_new_label();                           \
2765                         r_cond = tcg_temp_new();                        \
2766                         cond = GET_FIELD_SP(insn, 14, 17);              \
2767                         gen_fcond(r_cond, fcc, cond);                   \
2768                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2769                                            0, l1);                      \
2770                         tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
2771                                         cpu_fpr[DFPREG(rs2)]);          \
2772                         tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
2773                                         cpu_fpr[DFPREG(rs2) + 1]);      \
2774                         gen_set_label(l1);                              \
2775                         tcg_temp_free(r_cond);                          \
2776                     }
2777 #define FMOVQCC(fcc)                                                    \
2778                     {                                                   \
2779                         TCGv r_cond;                                    \
2780                         int l1;                                         \
2781                                                                         \
2782                         l1 = gen_new_label();                           \
2783                         r_cond = tcg_temp_new();                        \
2784                         cond = GET_FIELD_SP(insn, 14, 17);              \
2785                         gen_fcond(r_cond, fcc, cond);                   \
2786                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2787                                            0, l1);                      \
2788                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
2789                                         cpu_fpr[QFPREG(rs2)]);          \
2790                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
2791                                         cpu_fpr[QFPREG(rs2) + 1]);      \
2792                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
2793                                         cpu_fpr[QFPREG(rs2) + 2]);      \
2794                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
2795                                         cpu_fpr[QFPREG(rs2) + 3]);      \
2796                         gen_set_label(l1);                              \
2797                         tcg_temp_free(r_cond);                          \
2798                     }
2799                     case 0x001: /* V9 fmovscc %fcc0 */
2800                         FMOVSCC(0);
2801                         break;
2802                     case 0x002: /* V9 fmovdcc %fcc0 */
2803                         FMOVDCC(0);
2804                         break;
2805                     case 0x003: /* V9 fmovqcc %fcc0 */
2806                         CHECK_FPU_FEATURE(dc, FLOAT128);
2807                         FMOVQCC(0);
2808                         break;
2809                     case 0x041: /* V9 fmovscc %fcc1 */
2810                         FMOVSCC(1);
2811                         break;
2812                     case 0x042: /* V9 fmovdcc %fcc1 */
2813                         FMOVDCC(1);
2814                         break;
2815                     case 0x043: /* V9 fmovqcc %fcc1 */
2816                         CHECK_FPU_FEATURE(dc, FLOAT128);
2817                         FMOVQCC(1);
2818                         break;
2819                     case 0x081: /* V9 fmovscc %fcc2 */
2820                         FMOVSCC(2);
2821                         break;
2822                     case 0x082: /* V9 fmovdcc %fcc2 */
2823                         FMOVDCC(2);
2824                         break;
2825                     case 0x083: /* V9 fmovqcc %fcc2 */
2826                         CHECK_FPU_FEATURE(dc, FLOAT128);
2827                         FMOVQCC(2);
2828                         break;
2829                     case 0x0c1: /* V9 fmovscc %fcc3 */
2830                         FMOVSCC(3);
2831                         break;
2832                     case 0x0c2: /* V9 fmovdcc %fcc3 */
2833                         FMOVDCC(3);
2834                         break;
2835                     case 0x0c3: /* V9 fmovqcc %fcc3 */
2836                         CHECK_FPU_FEATURE(dc, FLOAT128);
2837                         FMOVQCC(3);
2838                         break;
2839 #undef FMOVSCC
2840 #undef FMOVDCC
2841 #undef FMOVQCC
2842 #define FMOVSCC(icc)                                                    \
2843                     {                                                   \
2844                         TCGv r_cond;                                    \
2845                         int l1;                                         \
2846                                                                         \
2847                         l1 = gen_new_label();                           \
2848                         r_cond = tcg_temp_new();                        \
2849                         cond = GET_FIELD_SP(insn, 14, 17);              \
2850                         gen_cond(r_cond, icc, cond, dc);                \
2851                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2852                                            0, l1);                      \
2853                         tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
2854                         gen_set_label(l1);                              \
2855                         tcg_temp_free(r_cond);                          \
2856                     }
2857 #define FMOVDCC(icc)                                                    \
2858                     {                                                   \
2859                         TCGv r_cond;                                    \
2860                         int l1;                                         \
2861                                                                         \
2862                         l1 = gen_new_label();                           \
2863                         r_cond = tcg_temp_new();                        \
2864                         cond = GET_FIELD_SP(insn, 14, 17);              \
2865                         gen_cond(r_cond, icc, cond, dc);                \
2866                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2867                                            0, l1);                      \
2868                         tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
2869                                         cpu_fpr[DFPREG(rs2)]);          \
2870                         tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
2871                                         cpu_fpr[DFPREG(rs2) + 1]);      \
2872                         gen_set_label(l1);                              \
2873                         tcg_temp_free(r_cond);                          \
2874                     }
2875 #define FMOVQCC(icc)                                                    \
2876                     {                                                   \
2877                         TCGv r_cond;                                    \
2878                         int l1;                                         \
2879                                                                         \
2880                         l1 = gen_new_label();                           \
2881                         r_cond = tcg_temp_new();                        \
2882                         cond = GET_FIELD_SP(insn, 14, 17);              \
2883                         gen_cond(r_cond, icc, cond, dc);                \
2884                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2885                                            0, l1);                      \
2886                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
2887                                         cpu_fpr[QFPREG(rs2)]);          \
2888                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
2889                                         cpu_fpr[QFPREG(rs2) + 1]);      \
2890                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
2891                                         cpu_fpr[QFPREG(rs2) + 2]);      \
2892                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
2893                                         cpu_fpr[QFPREG(rs2) + 3]);      \
2894                         gen_set_label(l1);                              \
2895                         tcg_temp_free(r_cond);                          \
2896                     }
2897
2898                     case 0x101: /* V9 fmovscc %icc */
2899                         FMOVSCC(0);
2900                         break;
2901                     case 0x102: /* V9 fmovdcc %icc */
2902                         FMOVDCC(0);
                             break;
2903                     case 0x103: /* V9 fmovqcc %icc */
2904                         CHECK_FPU_FEATURE(dc, FLOAT128);
2905                         FMOVQCC(0);
2906                         break;
2907                     case 0x181: /* V9 fmovscc %xcc */
2908                         FMOVSCC(1);
2909                         break;
2910                     case 0x182: /* V9 fmovdcc %xcc */
2911                         FMOVDCC(1);
2912                         break;
2913                     case 0x183: /* V9 fmovqcc %xcc */
2914                         CHECK_FPU_FEATURE(dc, FLOAT128);
2915                         FMOVQCC(1);
2916                         break;
2917 #undef FMOVSCC
2918 #undef FMOVDCC
2919 #undef FMOVQCC
2920 #endif
2921                     case 0x51: /* fcmps, V9 %fcc */
2922                         gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2923                         break;
2924                     case 0x52: /* fcmpd, V9 %fcc */
2925                         gen_op_load_fpr_DT0(DFPREG(rs1));
2926                         gen_op_load_fpr_DT1(DFPREG(rs2));
2927                         gen_op_fcmpd(rd & 3);
2928                         break;
2929                     case 0x53: /* fcmpq, V9 %fcc */
2930                         CHECK_FPU_FEATURE(dc, FLOAT128);
2931                         gen_op_load_fpr_QT0(QFPREG(rs1));
2932                         gen_op_load_fpr_QT1(QFPREG(rs2));
2933                         gen_op_fcmpq(rd & 3);
2934                         break;
2935                     case 0x55: /* fcmpes, V9 %fcc */
2936                         gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2937                         break;
2938                     case 0x56: /* fcmped, V9 %fcc */
2939                         gen_op_load_fpr_DT0(DFPREG(rs1));
2940                         gen_op_load_fpr_DT1(DFPREG(rs2));
2941                         gen_op_fcmped(rd & 3);
2942                         break;
2943                     case 0x57: /* fcmpeq, V9 %fcc */
2944                         CHECK_FPU_FEATURE(dc, FLOAT128);
2945                         gen_op_load_fpr_QT0(QFPREG(rs1));
2946                         gen_op_load_fpr_QT1(QFPREG(rs2));
2947                         gen_op_fcmpeq(rd & 3);
2948                         break;
2949                     default:
2950                         goto illegal_insn;
2951                 }
2952             } else if (xop == 0x2) {
2953                 // clr/mov shortcut
2954
2955                 rs1 = GET_FIELD(insn, 13, 17);
2956                 if (rs1 == 0) {
2957                     // or %g0, x, rd is just a move of x (immediate or rs2) into rd
2958                     if (IS_IMM) {       /* immediate */
2959                         TCGv r_const;
2960
2961                         simm = GET_FIELDs(insn, 19, 31);
2962                         r_const = tcg_const_tl(simm);
2963                         gen_movl_TN_reg(rd, r_const);
2964                         tcg_temp_free(r_const);
2965                     } else {            /* register */
2966                         rs2 = GET_FIELD(insn, 27, 31);
2967                         gen_movl_reg_TN(rs2, cpu_dst);
2968                         gen_movl_TN_reg(rd, cpu_dst);
2969                     }
2970                 } else {
2971                     cpu_src1 = get_src1(insn, cpu_src1);
2972                     if (IS_IMM) {       /* immediate */
2973                         simm = GET_FIELDs(insn, 19, 31);
2974                         tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2975                         gen_movl_TN_reg(rd, cpu_dst);
2976                     } else {            /* register */
2977                         // or x, %g0, rd is just a move of x into rd
2978                         rs2 = GET_FIELD(insn, 27, 31);
2979                         if (rs2 != 0) {
2980                             gen_movl_reg_TN(rs2, cpu_src2);
2981                             tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2982                             gen_movl_TN_reg(rd, cpu_dst);
2983                         } else
2984                             gen_movl_TN_reg(rd, cpu_src1);
2985                     }
2986                 }
2987 #ifdef TARGET_SPARC64
2988             } else if (xop == 0x25) { /* sll, V9 sllx */
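                 /* Bit 12 selects the 64-bit form (sllx): the shift count is
                    6 bits wide instead of 5.  */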
2989                 cpu_src1 = get_src1(insn, cpu_src1);
2990                 if (IS_IMM) {   /* immediate */
2991                     simm = GET_FIELDs(insn, 20, 31);
2992                     if (insn & (1 << 12)) {
2993                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2994                     } else {
2995                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2996                     }
2997                 } else {                /* register */
2998                     rs2 = GET_FIELD(insn, 27, 31);
2999                     gen_movl_reg_TN(rs2, cpu_src2);
3000                     if (insn & (1 << 12)) {
3001                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3002                     } else {
3003                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3004                     }
3005                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3006                 }
3007                 gen_movl_TN_reg(rd, cpu_dst);
3008             } else if (xop == 0x26) { /* srl, V9 srlx */
3009                 cpu_src1 = get_src1(insn, cpu_src1);
3010                 if (IS_IMM) {   /* immediate */
3011                     simm = GET_FIELDs(insn, 20, 31);
3012                     if (insn & (1 << 12)) {
3013                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3014                     } else {
3015                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3016                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3017                     }
3018                 } else {                /* register */
3019                     rs2 = GET_FIELD(insn, 27, 31);
3020                     gen_movl_reg_TN(rs2, cpu_src2);
3021                     if (insn & (1 << 12)) {
3022                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3023                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3024                     } else {
3025                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3026                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3027                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3028                     }
3029                 }
3030                 gen_movl_TN_reg(rd, cpu_dst);
3031             } else if (xop == 0x27) { /* sra, V9 srax */
3032                 cpu_src1 = get_src1(insn, cpu_src1);
3033                 if (IS_IMM) {   /* immediate */
3034                     simm = GET_FIELDs(insn, 20, 31);
3035                     if (insn & (1 << 12)) {
3036                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3037                     } else {
3038                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3039                         tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3040                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3041                     }
3042                 } else {                /* register */
3043                     rs2 = GET_FIELD(insn, 27, 31);
3044                     gen_movl_reg_TN(rs2, cpu_src2);
3045                     if (insn & (1 << 12)) {
3046                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3047                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3048                     } else {
3049                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3050                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3051                         tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3052                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3053                     }
3054                 }
3055                 gen_movl_TN_reg(rd, cpu_dst);
3056 #endif
3057             } else if (xop < 0x36) {
3058                 if (xop < 0x20) {
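                    /* xop 0x00-0x0f are the arithmetic/logical ops; bit 4
                       (0x10) selects the %icc-updating "cc" variant, tested
                       below as (xop & 0x10) */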
3059                     cpu_src1 = get_src1(insn, cpu_src1);
3060                     cpu_src2 = get_src2(insn, cpu_src2);
3061                     switch (xop & ~0x10) {
3062                     case 0x0: /* add */
3063                         if (IS_IMM) {
3064                             simm = GET_FIELDs(insn, 19, 31);
3065                             if (xop & 0x10) {
3066                                 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3067                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3068                                 dc->cc_op = CC_OP_ADD;
3069                             } else {
3070                                 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3071                             }
3072                         } else {
3073                             if (xop & 0x10) {
3074                                 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3075                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3076                                 dc->cc_op = CC_OP_ADD;
3077                             } else {
3078                                 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3079                             }
3080                         }
3081                         break;
3082                     case 0x1: /* and */
3083                         if (IS_IMM) {
3084                             simm = GET_FIELDs(insn, 19, 31);
3085                             tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3086                         } else {
3087                             tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3088                         }
3089                         if (xop & 0x10) {
3090                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3091                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3092                             dc->cc_op = CC_OP_LOGIC;
3093                         }
3094                         break;
3095                     case 0x2: /* or */
3096                         if (IS_IMM) {
3097                             simm = GET_FIELDs(insn, 19, 31);
3098                             tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3099                         } else {
3100                             tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3101                         }
3102                         if (xop & 0x10) {
3103                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3104                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3105                             dc->cc_op = CC_OP_LOGIC;
3106                         }
3107                         break;
3108                     case 0x3: /* xor */
3109                         if (IS_IMM) {
3110                             simm = GET_FIELDs(insn, 19, 31);
3111                             tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3112                         } else {
3113                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3114                         }
3115                         if (xop & 0x10) {
3116                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3117                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3118                             dc->cc_op = CC_OP_LOGIC;
3119                         }
3120                         break;
3121                     case 0x4: /* sub */
3122                         if (IS_IMM) {
3123                             simm = GET_FIELDs(insn, 19, 31);
3124                             if (xop & 0x10) {
3125                                 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3126                             } else {
3127                                 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3128                             }
3129                         } else {
3130                             if (xop & 0x10) {
3131                                 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3132                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3133                                 dc->cc_op = CC_OP_SUB;
3134                             } else {
3135                                 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3136                             }
3137                         }
3138                         break;
3139                     case 0x5: /* andn */
3140                         if (IS_IMM) {
3141                             simm = GET_FIELDs(insn, 19, 31);
3142                             tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3143                         } else {
3144                             tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3145                         }
3146                         if (xop & 0x10) {
3147                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3148                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3149                             dc->cc_op = CC_OP_LOGIC;
3150                         }
3151                         break;
3152                     case 0x6: /* orn */
3153                         if (IS_IMM) {
3154                             simm = GET_FIELDs(insn, 19, 31);
3155                             tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3156                         } else {
3157                             tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3158                         }
3159                         if (xop & 0x10) {
3160                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3161                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3162                             dc->cc_op = CC_OP_LOGIC;
3163                         }
3164                         break;
3165                     case 0x7: /* xorn */
3166                         if (IS_IMM) {
3167                             simm = GET_FIELDs(insn, 19, 31);
3168                             tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3169                         } else {
3170                             tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3171                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3172                         }
3173                         if (xop & 0x10) {
3174                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3175                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3176                             dc->cc_op = CC_OP_LOGIC;
3177                         }
3178                         break;
3179                     case 0x8: /* addx, V9 addc */
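                        /* addx adds in the current carry, so the lazily
                           tracked condition codes are materialized into %psr
                           first before the carry bit is read */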
3180                         if (IS_IMM) {
3181                             simm = GET_FIELDs(insn, 19, 31);
3182                             if (xop & 0x10) {
3183                                 gen_helper_compute_psr();
3184                                 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
3185                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
3186                                 dc->cc_op = CC_OP_ADDX;
3187                             } else {
3188                                 gen_helper_compute_psr();
3189                                 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3190                                 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3191                                 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3192                             }
3193                         } else {
3194                             if (xop & 0x10) {
3195                                 gen_helper_compute_psr();
3196                                 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3197                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
3198                                 dc->cc_op = CC_OP_ADDX;
3199                             } else {
3200                                 gen_helper_compute_psr();
3201                                 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3202                                 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3203                                 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3204                             }
3205                         }
3206                         break;
3207 #ifdef TARGET_SPARC64
3208                     case 0x9: /* V9 mulx */
3209                         if (IS_IMM) {
3210                             simm = GET_FIELDs(insn, 19, 31);
3211                             tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3212                         } else {
3213                             tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3214                         }
3215                         break;
3216 #endif
3217                     case 0xa: /* umul */
3218                         CHECK_IU_FEATURE(dc, MUL);
3219                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3220                         if (xop & 0x10) {
3221                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3222                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3223                             dc->cc_op = CC_OP_LOGIC;
3224                         }
3225                         break;
3226                     case 0xb: /* smul */
3227                         CHECK_IU_FEATURE(dc, MUL);
3228                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3229                         if (xop & 0x10) {
3230                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3231                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3232                             dc->cc_op = CC_OP_LOGIC;
3233                         }
3234                         break;
3235                     case 0xc: /* subx, V9 subc */
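                        /* as with addx above, the borrow comes from the
                           evaluated carry flag in %psr */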
3236                         if (IS_IMM) {
3237                             simm = GET_FIELDs(insn, 19, 31);
3238                             if (xop & 0x10) {
3239                                 gen_helper_compute_psr();
3240                                 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3241                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3242                                 dc->cc_op = CC_OP_SUBX;
3243                             } else {
3244                                 gen_helper_compute_psr();
3245                                 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3246                                 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3247                                 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3248                             }
3249                         } else {
3250                             if (xop & 0x10) {
3251                                 gen_helper_compute_psr();
3252                                 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3253                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3254                                 dc->cc_op = CC_OP_SUBX;
3255                             } else {
3256                                 gen_helper_compute_psr();
3257                                 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3258                                 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3259                                 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3260                             }
3261                         }
3262                         break;
3263 #ifdef TARGET_SPARC64
3264                     case 0xd: /* V9 udivx */
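                        /* the operands are staged in the cc_src/cc_src2
                           globals, presumably because plain TCG temps do not
                           live across the branch emitted by the
                           divide-by-zero check */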
3265                         tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3266                         tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3267                         gen_trap_ifdivzero_tl(cpu_cc_src2);
3268                         tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3269                         break;
3270 #endif
3271                     case 0xe: /* udiv */
3272                         CHECK_IU_FEATURE(dc, DIV);
3273                         gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3274                         if (xop & 0x10) {
3275                             gen_op_div_cc(cpu_dst);
3276                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3277                             dc->cc_op = CC_OP_FLAGS;
3278                         }
3279                         break;
3280                     case 0xf: /* sdiv */
3281                         CHECK_IU_FEATURE(dc, DIV);
3282                         gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3283                         if (xop & 0x10) {
3284                             gen_op_div_cc(cpu_dst);
3285                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3286                             dc->cc_op = CC_OP_FLAGS;
3287                         }
3288                         break;
3289                     default:
3290                         goto illegal_insn;
3291                     }
3292                     gen_movl_TN_reg(rd, cpu_dst);
3293                 } else {
3294                     cpu_src1 = get_src1(insn, cpu_src1);
3295                     cpu_src2 = get_src2(insn, cpu_src2);
3296                     switch (xop) {
3297                     case 0x20: /* taddcc */
3298                         gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3299                         gen_movl_TN_reg(rd, cpu_dst);
3300                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3301                         dc->cc_op = CC_OP_FLAGS;
3302                         break;
3303                     case 0x21: /* tsubcc */
3304                         gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3305                         gen_movl_TN_reg(rd, cpu_dst);
3306                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3307                         dc->cc_op = CC_OP_FLAGS;
3308                         break;
3309                     case 0x22: /* taddcctv */
3310                         save_state(dc, cpu_cond);
3311                         gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3312                         gen_movl_TN_reg(rd, cpu_dst);
3313                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3314                         dc->cc_op = CC_OP_FLAGS;
3315                         break;
3316                     case 0x23: /* tsubcctv */
3317                         save_state(dc, cpu_cond);
3318                         gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3319                         gen_movl_TN_reg(rd, cpu_dst);
3320                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3321                         dc->cc_op = CC_OP_FLAGS;
3322                         break;
3323                     case 0x24: /* mulscc */
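                        /* mulscc folds the current (N xor V) flag and %y into
                           the partial product, so the lazily tracked flags are
                           evaluated first */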
3324                         gen_helper_compute_psr();
3325                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3326                         gen_movl_TN_reg(rd, cpu_dst);
3327                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3328                         dc->cc_op = CC_OP_FLAGS;
3329                         break;
3330 #ifndef TARGET_SPARC64
3331                     case 0x25:  /* sll */
3332                         if (IS_IMM) { /* immediate */
3333                             simm = GET_FIELDs(insn, 20, 31);
3334                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3335                         } else { /* register */
3336                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3337                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3338                         }
3339                         gen_movl_TN_reg(rd, cpu_dst);
3340                         break;
3341                     case 0x26:  /* srl */
3342                         if (IS_IMM) { /* immediate */
3343                             simm = GET_FIELDs(insn, 20, 31);
3344                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3345                         } else { /* register */
3346                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3347                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3348                         }
3349                         gen_movl_TN_reg(rd, cpu_dst);
3350                         break;
3351                     case 0x27:  /* sra */
3352                         if (IS_IMM) { /* immediate */
3353                             simm = GET_FIELDs(insn, 20, 31);
3354                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3355                         } else { /* register */
3356                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3357                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3358                         }
3359                         gen_movl_TN_reg(rd, cpu_dst);
3360                         break;
3361 #endif
                    case 0x30: /* wry, V8/V9 wrasr */
3363                         {
3364                             switch(rd) {
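                            /* the wr instructions store rs1 XOR operand2 (the
                               SPARC spec defines them as an xor, not a plain
                               move), hence the xor at the top of each
                               writable case */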
3365                             case 0: /* wry */
3366                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3367                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3368                                 break;
3369 #ifndef TARGET_SPARC64
3370                             case 0x01 ... 0x0f: /* undefined in the
3371                                                    SPARCv8 manual, nop
3372                                                    on the microSPARC
3373                                                    II */
3374                             case 0x10 ... 0x1f: /* implementation-dependent
3375                                                    in the SPARCv8
3376                                                    manual, nop on the
3377                                                    microSPARC II */
3378                                 break;
3379 #else
3380                             case 0x2: /* V9 wrccr */
3381                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3382                                 gen_helper_wrccr(cpu_dst);
3383                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3384                                 dc->cc_op = CC_OP_FLAGS;
3385                                 break;
3386                             case 0x3: /* V9 wrasi */
3387                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3388                                 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3389                                 break;
3390                             case 0x6: /* V9 wrfprs */
3391                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3392                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3393                                 save_state(dc, cpu_cond);
3394                                 gen_op_next_insn();
3395                                 tcg_gen_exit_tb(0);
3396                                 dc->is_br = 1;
3397                                 break;
3398                             case 0xf: /* V9 sir, nop if user */
3399 #if !defined(CONFIG_USER_ONLY)
3400                                 if (supervisor(dc))
3401                                     ; // XXX
3402 #endif
3403                                 break;
3404                             case 0x13: /* Graphics Status */
3405                                 if (gen_trap_ifnofpu(dc, cpu_cond))
3406                                     goto jmp_insn;
3407                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3408                                 break;
3409                             case 0x14: /* Softint set */
3410                                 if (!supervisor(dc))
3411                                     goto illegal_insn;
3412                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3413                                 gen_helper_set_softint(cpu_tmp64);
3414                                 break;
3415                             case 0x15: /* Softint clear */
3416                                 if (!supervisor(dc))
3417                                     goto illegal_insn;
3418                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3419                                 gen_helper_clear_softint(cpu_tmp64);
3420                                 break;
3421                             case 0x16: /* Softint write */
3422                                 if (!supervisor(dc))
3423                                     goto illegal_insn;
3424                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3425                                 gen_helper_write_softint(cpu_tmp64);
3426                                 break;
3427                             case 0x17: /* Tick compare */
3428 #if !defined(CONFIG_USER_ONLY)
3429                                 if (!supervisor(dc))
3430                                     goto illegal_insn;
3431 #endif
3432                                 {
3433                                     TCGv_ptr r_tickptr;
3434
3435                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3436                                                    cpu_src2);
3437                                     r_tickptr = tcg_temp_new_ptr();
3438                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3439                                                    offsetof(CPUState, tick));
3440                                     gen_helper_tick_set_limit(r_tickptr,
3441                                                               cpu_tick_cmpr);
3442                                     tcg_temp_free_ptr(r_tickptr);
3443                                 }
3444                                 break;
3445                             case 0x18: /* System tick */
3446 #if !defined(CONFIG_USER_ONLY)
3447                                 if (!supervisor(dc))
3448                                     goto illegal_insn;
3449 #endif
3450                                 {
3451                                     TCGv_ptr r_tickptr;
3452
3453                                     tcg_gen_xor_tl(cpu_dst, cpu_src1,
3454                                                    cpu_src2);
3455                                     r_tickptr = tcg_temp_new_ptr();
3456                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3457                                                    offsetof(CPUState, stick));
3458                                     gen_helper_tick_set_count(r_tickptr,
3459                                                               cpu_dst);
3460                                     tcg_temp_free_ptr(r_tickptr);
3461                                 }
3462                                 break;
3463                             case 0x19: /* System tick compare */
3464 #if !defined(CONFIG_USER_ONLY)
3465                                 if (!supervisor(dc))
3466                                     goto illegal_insn;
3467 #endif
3468                                 {
3469                                     TCGv_ptr r_tickptr;
3470
3471                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3472                                                    cpu_src2);
3473                                     r_tickptr = tcg_temp_new_ptr();
3474                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3475                                                    offsetof(CPUState, stick));
3476                                     gen_helper_tick_set_limit(r_tickptr,
3477                                                               cpu_stick_cmpr);
3478                                     tcg_temp_free_ptr(r_tickptr);
3479                                 }
3480                                 break;
3481
3482                             case 0x10: /* Performance Control */
3483                             case 0x11: /* Performance Instrumentation
3484                                           Counter */
3485                             case 0x12: /* Dispatch Control */
3486 #endif
3487                             default:
3488                                 goto illegal_insn;
3489                             }
3490                         }
3491                         break;
3492 #if !defined(CONFIG_USER_ONLY)
3493                     case 0x31: /* wrpsr, V9 saved, restored */
3494                         {
3495                             if (!supervisor(dc))
3496                                 goto priv_insn;
3497 #ifdef TARGET_SPARC64
3498                             switch (rd) {
3499                             case 0:
3500                                 gen_helper_saved();
3501                                 break;
3502                             case 1:
3503                                 gen_helper_restored();
3504                                 break;
3505                             case 2: /* UA2005 allclean */
3506                             case 3: /* UA2005 otherw */
3507                             case 4: /* UA2005 normalw */
3508                             case 5: /* UA2005 invalw */
3509                                 // XXX
3510                             default:
3511                                 goto illegal_insn;
3512                             }
3513 #else
3514                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3515                             gen_helper_wrpsr(cpu_dst);
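                            /* a PSR write can change CWP, PIL and the
                               enable-traps bit, so stop translation and
                               resume at the next instruction */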
3516                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3517                             dc->cc_op = CC_OP_FLAGS;
3518                             save_state(dc, cpu_cond);
3519                             gen_op_next_insn();
3520                             tcg_gen_exit_tb(0);
3521                             dc->is_br = 1;
3522 #endif
3523                         }
3524                         break;
3525                     case 0x32: /* wrwim, V9 wrpr */
3526                         {
3527                             if (!supervisor(dc))
3528                                 goto priv_insn;
3529                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3530 #ifdef TARGET_SPARC64
3531                             switch (rd) {
3532                             case 0: // tpc
3533                                 {
3534                                     TCGv_ptr r_tsptr;
3535
3536                                     r_tsptr = tcg_temp_new_ptr();
3537                                     tcg_gen_ld_ptr(r_tsptr, cpu_env,
3538                                                    offsetof(CPUState, tsptr));
3539                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3540                                                   offsetof(trap_state, tpc));
3541                                     tcg_temp_free_ptr(r_tsptr);
3542                                 }
3543                                 break;
3544                             case 1: // tnpc
3545                                 {
3546                                     TCGv_ptr r_tsptr;
3547
3548                                     r_tsptr = tcg_temp_new_ptr();
3549                                     tcg_gen_ld_ptr(r_tsptr, cpu_env,
3550                                                    offsetof(CPUState, tsptr));
3551                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3552                                                   offsetof(trap_state, tnpc));
3553                                     tcg_temp_free_ptr(r_tsptr);
3554                                 }
3555                                 break;
3556                             case 2: // tstate
3557                                 {
3558                                     TCGv_ptr r_tsptr;
3559
3560                                     r_tsptr = tcg_temp_new_ptr();
3561                                     tcg_gen_ld_ptr(r_tsptr, cpu_env,
3562                                                    offsetof(CPUState, tsptr));
3563                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3564                                                   offsetof(trap_state,
3565                                                            tstate));
3566                                     tcg_temp_free_ptr(r_tsptr);
3567                                 }
3568                                 break;
3569                             case 3: // tt
3570                                 {
3571                                     TCGv_ptr r_tsptr;
3572
3573                                     r_tsptr = tcg_temp_new_ptr();
3574                                     tcg_gen_ld_ptr(r_tsptr, cpu_env,
3575                                                    offsetof(CPUState, tsptr));
3576                                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3577                                     tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3578                                                    offsetof(trap_state, tt));
3579                                     tcg_temp_free_ptr(r_tsptr);
3580                                 }
3581                                 break;
3582                             case 4: // tick
3583                                 {
3584                                     TCGv_ptr r_tickptr;
3585
3586                                     r_tickptr = tcg_temp_new_ptr();
3587                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3588                                                    offsetof(CPUState, tick));
3589                                     gen_helper_tick_set_count(r_tickptr,
3590                                                               cpu_tmp0);
3591                                     tcg_temp_free_ptr(r_tickptr);
3592                                 }
3593                                 break;
3594                             case 5: // tba
3595                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3596                                 break;
3597                             case 6: // pstate
3598                                 save_state(dc, cpu_cond);
3599                                 gen_helper_wrpstate(cpu_tmp0);
3600                                 gen_op_next_insn();
3601                                 tcg_gen_exit_tb(0);
3602                                 dc->is_br = 1;
3603                                 break;
3604                             case 7: // tl
3605                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3606                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3607                                                offsetof(CPUSPARCState, tl));
3608                                 break;
3609                             case 8: // pil
3610                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3611                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3612                                                offsetof(CPUSPARCState,
3613                                                         psrpil));
3614                                 break;
3615                             case 9: // cwp
3616                                 gen_helper_wrcwp(cpu_tmp0);
3617                                 break;
3618                             case 10: // cansave
3619                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3620                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3621                                                offsetof(CPUSPARCState,
3622                                                         cansave));
3623                                 break;
3624                             case 11: // canrestore
3625                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3626                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3627                                                offsetof(CPUSPARCState,
3628                                                         canrestore));
3629                                 break;
3630                             case 12: // cleanwin
3631                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3632                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3633                                                offsetof(CPUSPARCState,
3634                                                         cleanwin));
3635                                 break;
3636                             case 13: // otherwin
3637                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3638                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3639                                                offsetof(CPUSPARCState,
3640                                                         otherwin));
3641                                 break;
3642                             case 14: // wstate
3643                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3644                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3645                                                offsetof(CPUSPARCState,
3646                                                         wstate));
3647                                 break;
3648                             case 16: // UA2005 gl
3649                                 CHECK_IU_FEATURE(dc, GL);
3650                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3651                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3652                                                offsetof(CPUSPARCState, gl));
3653                                 break;
3654                             case 26: // UA2005 strand status
3655                                 CHECK_IU_FEATURE(dc, HYPV);
3656                                 if (!hypervisor(dc))
3657                                     goto priv_insn;
3658                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3659                                 break;
3660                             default:
3661                                 goto illegal_insn;
3662                             }
3663 #else
3664                             tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3665                             if (dc->def->nwindows != 32)
3666                                 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3667                                                 (1 << dc->def->nwindows) - 1);
3668                             tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3669 #endif
3670                         }
3671                         break;
3672                     case 0x33: /* wrtbr, UA2005 wrhpr */
3673                         {
3674 #ifndef TARGET_SPARC64
3675                             if (!supervisor(dc))
3676                                 goto priv_insn;
3677                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3678 #else
3679                             CHECK_IU_FEATURE(dc, HYPV);
3680                             if (!hypervisor(dc))
3681                                 goto priv_insn;
3682                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3683                             switch (rd) {
3684                             case 0: // hpstate
3685                                 // XXX gen_op_wrhpstate();
3686                                 save_state(dc, cpu_cond);
3687                                 gen_op_next_insn();
3688                                 tcg_gen_exit_tb(0);
3689                                 dc->is_br = 1;
3690                                 break;
3691                             case 1: // htstate
3692                                 // XXX gen_op_wrhtstate();
3693                                 break;
3694                             case 3: // hintp
3695                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3696                                 break;
3697                             case 5: // htba
3698                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3699                                 break;
3700                             case 31: // hstick_cmpr
3701                                 {
3702                                     TCGv_ptr r_tickptr;
3703
3704                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3705                                     r_tickptr = tcg_temp_new_ptr();
3706                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3707                                                    offsetof(CPUState, hstick));
3708                                     gen_helper_tick_set_limit(r_tickptr,
3709                                                               cpu_hstick_cmpr);
3710                                     tcg_temp_free_ptr(r_tickptr);
3711                                 }
3712                                 break;
3713                             case 6: // hver readonly
3714                             default:
3715                                 goto illegal_insn;
3716                             }
3717 #endif
3718                         }
3719                         break;
3720 #endif
3721 #ifdef TARGET_SPARC64
3722                     case 0x2c: /* V9 movcc */
3723                         {
3724                             int cc = GET_FIELD_SP(insn, 11, 12);
3725                             int cond = GET_FIELD_SP(insn, 14, 17);
3726                             TCGv r_cond;
3727                             int l1;
3728
3729                             r_cond = tcg_temp_new();
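                            /* insn bit 18 selects the integer condition codes
                               (cc field: 0 = %icc, 2 = %xcc); when clear the
                               cc field picks %fccN instead */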
3730                             if (insn & (1 << 18)) {
3731                                 if (cc == 0)
3732                                     gen_cond(r_cond, 0, cond, dc);
3733                                 else if (cc == 2)
3734                                     gen_cond(r_cond, 1, cond, dc);
3735                                 else
3736                                     goto illegal_insn;
3737                             } else {
3738                                 gen_fcond(r_cond, cc, cond);
3739                             }
3740
3741                             l1 = gen_new_label();
3742
3743                             tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3744                             if (IS_IMM) {       /* immediate */
3745                                 TCGv r_const;
3746
3747                                 simm = GET_FIELD_SPs(insn, 0, 10);
3748                                 r_const = tcg_const_tl(simm);
3749                                 gen_movl_TN_reg(rd, r_const);
3750                                 tcg_temp_free(r_const);
3751                             } else {
3752                                 rs2 = GET_FIELD_SP(insn, 0, 4);
3753                                 gen_movl_reg_TN(rs2, cpu_tmp0);
3754                                 gen_movl_TN_reg(rd, cpu_tmp0);
3755                             }
3756                             gen_set_label(l1);
3757                             tcg_temp_free(r_cond);
3758                             break;
3759                         }
3760                     case 0x2d: /* V9 sdivx */
3761                         gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3762                         gen_movl_TN_reg(rd, cpu_dst);
3763                         break;
3764                     case 0x2e: /* V9 popc */
3765                         {
3766                             cpu_src2 = get_src2(insn, cpu_src2);
3767                             gen_helper_popc(cpu_dst, cpu_src2);
3768                             gen_movl_TN_reg(rd, cpu_dst);
                            break; /* do not fall through into movr, which
                                      would clobber the popc result */
                        }
3770                     case 0x2f: /* V9 movr */
3771                         {
3772                             int cond = GET_FIELD_SP(insn, 10, 12);
3773                             int l1;
3774
3775                             cpu_src1 = get_src1(insn, cpu_src1);
3776
3777                             l1 = gen_new_label();
3778
3779                             tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3780                                               cpu_src1, 0, l1);
3781                             if (IS_IMM) {       /* immediate */
3782                                 TCGv r_const;
3783
3784                                 simm = GET_FIELD_SPs(insn, 0, 9);
3785                                 r_const = tcg_const_tl(simm);
3786                                 gen_movl_TN_reg(rd, r_const);
3787                                 tcg_temp_free(r_const);
3788                             } else {
3789                                 rs2 = GET_FIELD_SP(insn, 0, 4);
3790                                 gen_movl_reg_TN(rs2, cpu_tmp0);
3791                                 gen_movl_TN_reg(rd, cpu_tmp0);
3792                             }
3793                             gen_set_label(l1);
3794                             break;
3795                         }
3796 #endif
3797                     default:
3798                         goto illegal_insn;
3799                     }
3800                 }
3801             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3802 #ifdef TARGET_SPARC64
3803                 int opf = GET_FIELD_SP(insn, 5, 13);
3804                 rs1 = GET_FIELD(insn, 13, 17);
3805                 rs2 = GET_FIELD(insn, 27, 31);
3806                 if (gen_trap_ifnofpu(dc, cpu_cond))
3807                     goto jmp_insn;
3808
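                /* VIS ops dispatch on the 9-bit opf field; opcodes that are
                   not implemented here are rejected via illegal_insn */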
3809                 switch (opf) {
3810                 case 0x000: /* VIS I edge8cc */
3811                 case 0x001: /* VIS II edge8n */
3812                 case 0x002: /* VIS I edge8lcc */
3813                 case 0x003: /* VIS II edge8ln */
3814                 case 0x004: /* VIS I edge16cc */
3815                 case 0x005: /* VIS II edge16n */
3816                 case 0x006: /* VIS I edge16lcc */
3817                 case 0x007: /* VIS II edge16ln */
3818                 case 0x008: /* VIS I edge32cc */
3819                 case 0x009: /* VIS II edge32n */
3820                 case 0x00a: /* VIS I edge32lcc */
3821                 case 0x00b: /* VIS II edge32ln */
3822                     // XXX
3823                     goto illegal_insn;
3824                 case 0x010: /* VIS I array8 */
3825                     CHECK_FPU_FEATURE(dc, VIS1);
3826                     cpu_src1 = get_src1(insn, cpu_src1);
3827                     gen_movl_reg_TN(rs2, cpu_src2);
3828                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3829                     gen_movl_TN_reg(rd, cpu_dst);
3830                     break;
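                /* array16 and array32 reuse the array8 helper and scale the
                   resulting address by the element size (x2 and x4) */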
3831                 case 0x012: /* VIS I array16 */
3832                     CHECK_FPU_FEATURE(dc, VIS1);
3833                     cpu_src1 = get_src1(insn, cpu_src1);
3834                     gen_movl_reg_TN(rs2, cpu_src2);
3835                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3836                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3837                     gen_movl_TN_reg(rd, cpu_dst);
3838                     break;
3839                 case 0x014: /* VIS I array32 */
3840                     CHECK_FPU_FEATURE(dc, VIS1);
3841                     cpu_src1 = get_src1(insn, cpu_src1);
3842                     gen_movl_reg_TN(rs2, cpu_src2);
3843                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3844                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3845                     gen_movl_TN_reg(rd, cpu_dst);
3846                     break;
3847                 case 0x018: /* VIS I alignaddr */
3848                     CHECK_FPU_FEATURE(dc, VIS1);
3849                     cpu_src1 = get_src1(insn, cpu_src1);
3850                     gen_movl_reg_TN(rs2, cpu_src2);
3851                     gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3852                     gen_movl_TN_reg(rd, cpu_dst);
3853                     break;
3854                 case 0x019: /* VIS II bmask */
3855                 case 0x01a: /* VIS I alignaddrl */
3856                     // XXX
3857                     goto illegal_insn;
3858                 case 0x020: /* VIS I fcmple16 */
3859                     CHECK_FPU_FEATURE(dc, VIS1);
3860                     gen_op_load_fpr_DT0(DFPREG(rs1));
3861                     gen_op_load_fpr_DT1(DFPREG(rs2));
3862                     gen_helper_fcmple16();
3863                     gen_op_store_DT0_fpr(DFPREG(rd));
3864                     break;
3865                 case 0x022: /* VIS I fcmpne16 */
3866                     CHECK_FPU_FEATURE(dc, VIS1);
3867                     gen_op_load_fpr_DT0(DFPREG(rs1));
3868                     gen_op_load_fpr_DT1(DFPREG(rs2));
3869                     gen_helper_fcmpne16();
3870                     gen_op_store_DT0_fpr(DFPREG(rd));
3871                     break;
3872                 case 0x024: /* VIS I fcmple32 */
3873                     CHECK_FPU_FEATURE(dc, VIS1);
3874                     gen_op_load_fpr_DT0(DFPREG(rs1));
3875                     gen_op_load_fpr_DT1(DFPREG(rs2));
3876                     gen_helper_fcmple32();
3877                     gen_op_store_DT0_fpr(DFPREG(rd));
3878                     break;
3879                 case 0x026: /* VIS I fcmpne32 */
3880                     CHECK_FPU_FEATURE(dc, VIS1);
3881                     gen_op_load_fpr_DT0(DFPREG(rs1));
3882                     gen_op_load_fpr_DT1(DFPREG(rs2));
3883                     gen_helper_fcmpne32();
3884                     gen_op_store_DT0_fpr(DFPREG(rd));
3885                     break;
3886                 case 0x028: /* VIS I fcmpgt16 */
3887                     CHECK_FPU_FEATURE(dc, VIS1);
3888                     gen_op_load_fpr_DT0(DFPREG(rs1));
3889                     gen_op_load_fpr_DT1(DFPREG(rs2));
3890                     gen_helper_fcmpgt16();
3891                     gen_op_store_DT0_fpr(DFPREG(rd));
3892                     break;
3893                 case 0x02a: /* VIS I fcmpeq16 */
3894                     CHECK_FPU_FEATURE(dc, VIS1);
3895                     gen_op_load_fpr_DT0(DFPREG(rs1));
3896                     gen_op_load_fpr_DT1(DFPREG(rs2));
3897                     gen_helper_fcmpeq16();
3898                     gen_op_store_DT0_fpr(DFPREG(rd));
3899                     break;
3900                 case 0x02c: /* VIS I fcmpgt32 */
3901                     CHECK_FPU_FEATURE(dc, VIS1);
3902                     gen_op_load_fpr_DT0(DFPREG(rs1));
3903                     gen_op_load_fpr_DT1(DFPREG(rs2));
3904                     gen_helper_fcmpgt32();
3905                     gen_op_store_DT0_fpr(DFPREG(rd));
3906                     break;
3907                 case 0x02e: /* VIS I fcmpeq32 */
3908                     CHECK_FPU_FEATURE(dc, VIS1);
3909                     gen_op_load_fpr_DT0(DFPREG(rs1));
3910                     gen_op_load_fpr_DT1(DFPREG(rs2));
3911                     gen_helper_fcmpeq32();
3912                     gen_op_store_DT0_fpr(DFPREG(rd));
3913                     break;
3914                 case 0x031: /* VIS I fmul8x16 */
3915                     CHECK_FPU_FEATURE(dc, VIS1);
3916                     gen_op_load_fpr_DT0(DFPREG(rs1));
3917                     gen_op_load_fpr_DT1(DFPREG(rs2));
3918                     gen_helper_fmul8x16();
3919                     gen_op_store_DT0_fpr(DFPREG(rd));
3920                     break;
3921                 case 0x033: /* VIS I fmul8x16au */
3922                     CHECK_FPU_FEATURE(dc, VIS1);
3923                     gen_op_load_fpr_DT0(DFPREG(rs1));
3924                     gen_op_load_fpr_DT1(DFPREG(rs2));
3925                     gen_helper_fmul8x16au();
3926                     gen_op_store_DT0_fpr(DFPREG(rd));
3927                     break;
3928                 case 0x035: /* VIS I fmul8x16al */
3929                     CHECK_FPU_FEATURE(dc, VIS1);
3930                     gen_op_load_fpr_DT0(DFPREG(rs1));
3931                     gen_op_load_fpr_DT1(DFPREG(rs2));
3932                     gen_helper_fmul8x16al();
3933                     gen_op_store_DT0_fpr(DFPREG(rd));
3934                     break;
3935                 case 0x036: /* VIS I fmul8sux16 */
3936                     CHECK_FPU_FEATURE(dc, VIS1);
3937                     gen_op_load_fpr_DT0(DFPREG(rs1));
3938                     gen_op_load_fpr_DT1(DFPREG(rs2));
3939                     gen_helper_fmul8sux16();
3940                     gen_op_store_DT0_fpr(DFPREG(rd));
3941                     break;
3942                 case 0x037: /* VIS I fmul8ulx16 */
3943                     CHECK_FPU_FEATURE(dc, VIS1);
3944                     gen_op_load_fpr_DT0(DFPREG(rs1));
3945                     gen_op_load_fpr_DT1(DFPREG(rs2));
3946                     gen_helper_fmul8ulx16();
3947                     gen_op_store_DT0_fpr(DFPREG(rd));
3948                     break;
3949                 case 0x038: /* VIS I fmuld8sux16 */
3950                     CHECK_FPU_FEATURE(dc, VIS1);
3951                     gen_op_load_fpr_DT0(DFPREG(rs1));
3952                     gen_op_load_fpr_DT1(DFPREG(rs2));
3953                     gen_helper_fmuld8sux16();
3954                     gen_op_store_DT0_fpr(DFPREG(rd));
3955                     break;
3956                 case 0x039: /* VIS I fmuld8ulx16 */
3957                     CHECK_FPU_FEATURE(dc, VIS1);
3958                     gen_op_load_fpr_DT0(DFPREG(rs1));
3959                     gen_op_load_fpr_DT1(DFPREG(rs2));
3960                     gen_helper_fmuld8ulx16();
3961                     gen_op_store_DT0_fpr(DFPREG(rd));
3962                     break;
3963                 case 0x03a: /* VIS I fpack32 */
3964                 case 0x03b: /* VIS I fpack16 */
3965                 case 0x03d: /* VIS I fpackfix */
3966                 case 0x03e: /* VIS I pdist */
3967                     // XXX
3968                     goto illegal_insn;
3969                 case 0x048: /* VIS I faligndata */
3970                     CHECK_FPU_FEATURE(dc, VIS1);
3971                     gen_op_load_fpr_DT0(DFPREG(rs1));
3972                     gen_op_load_fpr_DT1(DFPREG(rs2));
3973                     gen_helper_faligndata();
3974                     gen_op_store_DT0_fpr(DFPREG(rd));
3975                     break;
3976                 case 0x04b: /* VIS I fpmerge */
3977                     CHECK_FPU_FEATURE(dc, VIS1);
3978                     gen_op_load_fpr_DT0(DFPREG(rs1));
3979                     gen_op_load_fpr_DT1(DFPREG(rs2));
3980                     gen_helper_fpmerge();
3981                     gen_op_store_DT0_fpr(DFPREG(rd));
3982                     break;
3983                 case 0x04c: /* VIS II bshuffle */
3984                     // XXX
3985                     goto illegal_insn;
3986                 case 0x04d: /* VIS I fexpand */
3987                     CHECK_FPU_FEATURE(dc, VIS1);
3988                     gen_op_load_fpr_DT0(DFPREG(rs1));
3989                     gen_op_load_fpr_DT1(DFPREG(rs2));
3990                     gen_helper_fexpand();
3991                     gen_op_store_DT0_fpr(DFPREG(rd));
3992                     break;
3993                 case 0x050: /* VIS I fpadd16 */
3994                     CHECK_FPU_FEATURE(dc, VIS1);
3995                     gen_op_load_fpr_DT0(DFPREG(rs1));
3996                     gen_op_load_fpr_DT1(DFPREG(rs2));
3997                     gen_helper_fpadd16();
3998                     gen_op_store_DT0_fpr(DFPREG(rd));
3999                     break;
4000                 case 0x051: /* VIS I fpadd16s */
4001                     CHECK_FPU_FEATURE(dc, VIS1);
4002                     gen_helper_fpadd16s(cpu_fpr[rd],
4003                                         cpu_fpr[rs1], cpu_fpr[rs2]);
4004                     break;
4005                 case 0x052: /* VIS I fpadd32 */
4006                     CHECK_FPU_FEATURE(dc, VIS1);
4007                     gen_op_load_fpr_DT0(DFPREG(rs1));
4008                     gen_op_load_fpr_DT1(DFPREG(rs2));
4009                     gen_helper_fpadd32();
4010                     gen_op_store_DT0_fpr(DFPREG(rd));
4011                     break;
4012                 case 0x053: /* VIS I fpadd32s */
4013                     CHECK_FPU_FEATURE(dc, VIS1);
4014                     gen_helper_fpadd32s(cpu_fpr[rd],
4015                                         cpu_fpr[rs1], cpu_fpr[rs2]);
4016                     break;
4017                 case 0x054: /* VIS I fpsub16 */
4018                     CHECK_FPU_FEATURE(dc, VIS1);
4019                     gen_op_load_fpr_DT0(DFPREG(rs1));
4020                     gen_op_load_fpr_DT1(DFPREG(rs2));
4021                     gen_helper_fpsub16();
4022                     gen_op_store_DT0_fpr(DFPREG(rd));
4023                     break;
4024                 case 0x055: /* VIS I fpsub16s */
4025                     CHECK_FPU_FEATURE(dc, VIS1);
4026                     gen_helper_fpsub16s(cpu_fpr[rd],
4027                                         cpu_fpr[rs1], cpu_fpr[rs2]);
4028                     break;
4029                 case 0x056: /* VIS I fpsub32 */
4030                     CHECK_FPU_FEATURE(dc, VIS1);
4031                     gen_op_load_fpr_DT0(DFPREG(rs1));
4032                     gen_op_load_fpr_DT1(DFPREG(rs2));
4033                     gen_helper_fpsub32();
4034                     gen_op_store_DT0_fpr(DFPREG(rd));
4035                     break;
4036                 case 0x057: /* VIS I fpsub32s */
4037                     CHECK_FPU_FEATURE(dc, VIS1);
4038                     gen_helper_fpsub32s(cpu_fpr[rd],
4039                                         cpu_fpr[rs1], cpu_fpr[rs2]);
4040                     break;
4041                 case 0x060: /* VIS I fzero */
4042                     CHECK_FPU_FEATURE(dc, VIS1);
4043                     tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
4044                     tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
4045                     break;
4046                 case 0x061: /* VIS I fzeros */
4047                     CHECK_FPU_FEATURE(dc, VIS1);
4048                     tcg_gen_movi_i32(cpu_fpr[rd], 0);
4049                     break;
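                         /* 0x060-0x07f are the VIS logical ops: the 16
                            two-operand boolean functions, with the "s" forms
                            acting on a single 32-bit register and the others on
                            a 64-bit even/odd register pair. */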
4050                 case 0x062: /* VIS I fnor */
4051                     CHECK_FPU_FEATURE(dc, VIS1);
4052                     tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4053                                     cpu_fpr[DFPREG(rs2)]);
4054                     tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1],
4055                                     cpu_fpr[DFPREG(rs1) + 1], cpu_fpr[DFPREG(rs2) + 1]);
4056                     break;
4057                 case 0x063: /* VIS I fnors */
4058                     CHECK_FPU_FEATURE(dc, VIS1);
4059                     tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4060                     break;
4061                 case 0x064: /* VIS I fandnot2 */
4062                     CHECK_FPU_FEATURE(dc, VIS1);
4063                     tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4064                                      cpu_fpr[DFPREG(rs2)]);
4065                     tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4066                                      cpu_fpr[DFPREG(rs1) + 1],
4067                                      cpu_fpr[DFPREG(rs2) + 1]);
4068                     break;
4069                 case 0x065: /* VIS I fandnot2s */
4070                     CHECK_FPU_FEATURE(dc, VIS1);
4071                     tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4072                     break;
4073                 case 0x066: /* VIS I fnot2 */
4074                     CHECK_FPU_FEATURE(dc, VIS1);
4075                     tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4076                     tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4077                                     cpu_fpr[DFPREG(rs2) + 1]);
4078                     break;
4079                 case 0x067: /* VIS I fnot2s */
4080                     CHECK_FPU_FEATURE(dc, VIS1);
4081                     tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4082                     break;
4083                 case 0x068: /* VIS I fandnot1 */
4084                     CHECK_FPU_FEATURE(dc, VIS1);
4085                     tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4086                                      cpu_fpr[DFPREG(rs1)]);
4087                     tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4088                                      cpu_fpr[DFPREG(rs2) + 1],
4089                                      cpu_fpr[DFPREG(rs1) + 1]);
4090                     break;
4091                 case 0x069: /* VIS I fandnot1s */
4092                     CHECK_FPU_FEATURE(dc, VIS1);
4093                     tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4094                     break;
4095                 case 0x06a: /* VIS I fnot1 */
4096                     CHECK_FPU_FEATURE(dc, VIS1);
4097                     tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4098                     tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4099                                     cpu_fpr[DFPREG(rs1) + 1]);
4100                     break;
4101                 case 0x06b: /* VIS I fnot1s */
4102                     CHECK_FPU_FEATURE(dc, VIS1);
4103                     tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4104                     break;
4105                 case 0x06c: /* VIS I fxor */
4106                     CHECK_FPU_FEATURE(dc, VIS1);
4107                     tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4108                                     cpu_fpr[DFPREG(rs2)]);
4109                     tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4110                                     cpu_fpr[DFPREG(rs1) + 1],
4111                                     cpu_fpr[DFPREG(rs2) + 1]);
4112                     break;
4113                 case 0x06d: /* VIS I fxors */
4114                     CHECK_FPU_FEATURE(dc, VIS1);
4115                     tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4116                     break;
4117                 case 0x06e: /* VIS I fnand */
4118                     CHECK_FPU_FEATURE(dc, VIS1);
4119                     tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4120                                      cpu_fpr[DFPREG(rs2)]);
4121                     tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1],
4122                                      cpu_fpr[DFPREG(rs1) + 1], cpu_fpr[DFPREG(rs2) + 1]);
4123                     break;
4124                 case 0x06f: /* VIS I fnands */
4125                     CHECK_FPU_FEATURE(dc, VIS1);
4126                     tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4127                     break;
4128                 case 0x070: /* VIS I fand */
4129                     CHECK_FPU_FEATURE(dc, VIS1);
4130                     tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4131                                     cpu_fpr[DFPREG(rs2)]);
4132                     tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4133                                     cpu_fpr[DFPREG(rs1) + 1],
4134                                     cpu_fpr[DFPREG(rs2) + 1]);
4135                     break;
4136                 case 0x071: /* VIS I fands */
4137                     CHECK_FPU_FEATURE(dc, VIS1);
4138                     tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4139                     break;
4140                 case 0x072: /* VIS I fxnor */
4141                     CHECK_FPU_FEATURE(dc, VIS1);
4142                     tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4143                     tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4144                                     cpu_fpr[DFPREG(rs1)]);
4145                     tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4146                     tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4147                                     cpu_fpr[DFPREG(rs1) + 1]);
4148                     break;
4149                 case 0x073: /* VIS I fxnors */
4150                     CHECK_FPU_FEATURE(dc, VIS1);
4151                     tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4152                     tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4153                     break;
4154                 case 0x074: /* VIS I fsrc1 */
4155                     CHECK_FPU_FEATURE(dc, VIS1);
4156                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4157                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4158                                     cpu_fpr[DFPREG(rs1) + 1]);
4159                     break;
4160                 case 0x075: /* VIS I fsrc1s */
4161                     CHECK_FPU_FEATURE(dc, VIS1);
4162                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4163                     break;
4164                 case 0x076: /* VIS I fornot2 */
4165                     CHECK_FPU_FEATURE(dc, VIS1);
4166                     tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4167                                     cpu_fpr[DFPREG(rs2)]);
4168                     tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4169                                     cpu_fpr[DFPREG(rs1) + 1],
4170                                     cpu_fpr[DFPREG(rs2) + 1]);
4171                     break;
4172                 case 0x077: /* VIS I fornot2s */
4173                     CHECK_FPU_FEATURE(dc, VIS1);
4174                     tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4175                     break;
4176                 case 0x078: /* VIS I fsrc2 */
4177                     CHECK_FPU_FEATURE(dc, VIS1);
4178                     gen_op_load_fpr_DT0(DFPREG(rs2));
4179                     gen_op_store_DT0_fpr(DFPREG(rd));
4180                     break;
4181                 case 0x079: /* VIS I fsrc2s */
4182                     CHECK_FPU_FEATURE(dc, VIS1);
4183                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4184                     break;
4185                 case 0x07a: /* VIS I fornot1 */
4186                     CHECK_FPU_FEATURE(dc, VIS1);
4187                     tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4188                                     cpu_fpr[DFPREG(rs1)]);
4189                     tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4190                                     cpu_fpr[DFPREG(rs2) + 1],
4191                                     cpu_fpr[DFPREG(rs1) + 1]);
4192                     break;
4193                 case 0x07b: /* VIS I fornot1s */
4194                     CHECK_FPU_FEATURE(dc, VIS1);
4195                     tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4196                     break;
4197                 case 0x07c: /* VIS I for */
4198                     CHECK_FPU_FEATURE(dc, VIS1);
4199                     tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4200                                    cpu_fpr[DFPREG(rs2)]);
4201                     tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4202                                    cpu_fpr[DFPREG(rs1) + 1],
4203                                    cpu_fpr[DFPREG(rs2) + 1]);
4204                     break;
4205                 case 0x07d: /* VIS I fors */
4206                     CHECK_FPU_FEATURE(dc, VIS1);
4207                     tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4208                     break;
4209                 case 0x07e: /* VIS I fone */
4210                     CHECK_FPU_FEATURE(dc, VIS1);
4211                     tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4212                     tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4213                     break;
4214                 case 0x07f: /* VIS I fones */
4215                     CHECK_FPU_FEATURE(dc, VIS1);
4216                     tcg_gen_movi_i32(cpu_fpr[rd], -1);
4217                     break;
4218                 case 0x080: /* VIS I shutdown */
4219                 case 0x081: /* VIS II siam */
4220                     // XXX
4221                     goto illegal_insn;
4222                 default:
4223                     goto illegal_insn;
4224                 }
4225 #else
4226                 goto ncp_insn;
4227 #endif
4228             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4229 #ifdef TARGET_SPARC64
4230                 goto illegal_insn;
4231 #else
4232                 goto ncp_insn;
4233 #endif
4234 #ifdef TARGET_SPARC64
4235             } else if (xop == 0x39) { /* V9 return */
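                     /* V9 return: restore the caller's register window (as
                        RESTORE does), then branch through the delay slot to
                        rs1 + (rs2 or simm13), which is checked for word
                        alignment. */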
4236                 TCGv_i32 r_const;
4237
4238                 save_state(dc, cpu_cond);
4239                 cpu_src1 = get_src1(insn, cpu_src1);
4240                 if (IS_IMM) {   /* immediate */
4241                     simm = GET_FIELDs(insn, 19, 31);
4242                     tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4243                 } else {                /* register */
4244                     rs2 = GET_FIELD(insn, 27, 31);
4245                     if (rs2) {
4246                         gen_movl_reg_TN(rs2, cpu_src2);
4247                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4248                     } else
4249                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
4250                 }
4251                 gen_helper_restore();
4252                 gen_mov_pc_npc(dc, cpu_cond);
4253                 r_const = tcg_const_i32(3);
4254                 gen_helper_check_align(cpu_dst, r_const);
4255                 tcg_temp_free_i32(r_const);
4256                 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4257                 dc->npc = DYNAMIC_PC;
4258                 goto jmp_insn;
4259 #endif
4260             } else {
4261                 cpu_src1 = get_src1(insn, cpu_src1);
4262                 if (IS_IMM) {   /* immediate */
4263                     simm = GET_FIELDs(insn, 19, 31);
4264                     tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4265                 } else {                /* register */
4266                     rs2 = GET_FIELD(insn, 27, 31);
4267                     if (rs2) {
4268                         gen_movl_reg_TN(rs2, cpu_src2);
4269                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4270                     } else
4271                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
4272                 }
4273                 switch (xop) {
4274                 case 0x38:      /* jmpl */
4275                     {
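                              /* jmpl: rd receives the address of the jmpl
                                 instruction itself, the delay slot at the old
                                 npc still executes, and the computed
                                 (word-aligned) target becomes the new npc. */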
4276                         TCGv r_pc;
4277                         TCGv_i32 r_const;
4278
4279                         r_pc = tcg_const_tl(dc->pc);
4280                         gen_movl_TN_reg(rd, r_pc);
4281                         tcg_temp_free(r_pc);
4282                         gen_mov_pc_npc(dc, cpu_cond);
4283                         r_const = tcg_const_i32(3);
4284                         gen_helper_check_align(cpu_dst, r_const);
4285                         tcg_temp_free_i32(r_const);
4286                         tcg_gen_mov_tl(cpu_npc, cpu_dst);
4287                         dc->npc = DYNAMIC_PC;
4288                     }
4289                     goto jmp_insn;
4290 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4291                 case 0x39:      /* rett, V9 return */
4292                     {
4293                         TCGv_i32 r_const;
4294
4295                         if (!supervisor(dc))
4296                             goto priv_insn;
4297                         gen_mov_pc_npc(dc, cpu_cond);
4298                         r_const = tcg_const_i32(3);
4299                         gen_helper_check_align(cpu_dst, r_const);
4300                         tcg_temp_free_i32(r_const);
4301                         tcg_gen_mov_tl(cpu_npc, cpu_dst);
4302                         dc->npc = DYNAMIC_PC;
4303                         gen_helper_rett();
4304                     }
4305                     goto jmp_insn;
4306 #endif
4307                 case 0x3b: /* flush */
4308                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4309                         goto unimp_flush;
4310                     gen_helper_flush(cpu_dst);
4311                     break;
4312                 case 0x3c:      /* save */
4313                     save_state(dc, cpu_cond);
4314                     gen_helper_save();
4315                     gen_movl_TN_reg(rd, cpu_dst);
4316                     break;
4317                 case 0x3d:      /* restore */
4318                     save_state(dc, cpu_cond);
4319                     gen_helper_restore();
4320                     gen_movl_TN_reg(rd, cpu_dst);
4321                     break;
4322 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4323                 case 0x3e:      /* V9 done/retry */
4324                     {
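                              /* done (rd == 0) and retry (rd == 1) return from
                                 a trap handler: the helpers reload pc/npc from
                                 the trap state registers, so both become
                                 dynamic and translation stops here. */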
4325                         switch (rd) {
4326                         case 0:
4327                             if (!supervisor(dc))
4328                                 goto priv_insn;
4329                             dc->npc = DYNAMIC_PC;
4330                             dc->pc = DYNAMIC_PC;
4331                             gen_helper_done();
4332                             goto jmp_insn;
4333                         case 1:
4334                             if (!supervisor(dc))
4335                                 goto priv_insn;
4336                             dc->npc = DYNAMIC_PC;
4337                             dc->pc = DYNAMIC_PC;
4338                             gen_helper_retry();
4339                             goto jmp_insn;
4340                         default:
4341                             goto illegal_insn;
4342                         }
4343                     }
4344                     break;
4345 #endif
4346                 default:
4347                     goto illegal_insn;
4348                 }
4349             }
4350             break;
4351         }
4352         break;
4353     case 3:                     /* load/store instructions */
4354         {
4355             unsigned int xop = GET_FIELD(insn, 7, 12);
4356
4357             cpu_src1 = get_src1(insn, cpu_src1);
4358             if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4359                 rs2 = GET_FIELD(insn, 27, 31);
4360                 gen_movl_reg_TN(rs2, cpu_src2);
4361                 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4362             } else if (IS_IMM) {     /* immediate */
4363                 simm = GET_FIELDs(insn, 19, 31);
4364                 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4365             } else {            /* register */
4366                 rs2 = GET_FIELD(insn, 27, 31);
4367                 if (rs2 != 0) {
4368                     gen_movl_reg_TN(rs2, cpu_src2);
4369                     tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4370                 } else
4371                     tcg_gen_mov_tl(cpu_addr, cpu_src1);
4372             }
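                  /* cpu_addr now holds the effective address: rs1 + simm13 or
                     rs1 + rs2 (e.g. "ld [%o0 + 8], %o1" uses the immediate
                     form), except for casa/casxa where the address is rs1
                     alone and cpu_src2 keeps the comparison value.  The first
                     group below handles the integer loads, ldstub and swap,
                     including their alternate-space forms. */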
4373             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4374                 (xop > 0x17 && xop <= 0x1d ) ||
4375                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4376                 switch (xop) {
4377                 case 0x0:       /* ld, V9 lduw, load unsigned word */
4378                     gen_address_mask(dc, cpu_addr);
4379                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4380                     break;
4381                 case 0x1:       /* ldub, load unsigned byte */
4382                     gen_address_mask(dc, cpu_addr);
4383                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4384                     break;
4385                 case 0x2:       /* lduh, load unsigned halfword */
4386                     gen_address_mask(dc, cpu_addr);
4387                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4388                     break;
4389                 case 0x3:       /* ldd, load double word */
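                         /* ldd requires an even rd; the even register receives
                            the more significant word and rd + 1 the less
                            significant one, e.g. "ldd [%o0], %o2" writes the
                            high word to %o2 and the low word to %o3. */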
4390                     if (rd & 1)
4391                         goto illegal_insn;
4392                     else {
4393                         TCGv_i32 r_const;
4394
4395                         save_state(dc, cpu_cond);
4396                         r_const = tcg_const_i32(7);
4397                         gen_helper_check_align(cpu_addr, r_const); // XXX remove
4398                         tcg_temp_free_i32(r_const);
4399                         gen_address_mask(dc, cpu_addr);
4400                         tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4401                         tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4402                         tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4403                         gen_movl_TN_reg(rd + 1, cpu_tmp0);
4404                         tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4405                         tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4406                         tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4407                     }
4408                     break;
4409                 case 0x9:       /* ldsb, load signed byte */
4410                     gen_address_mask(dc, cpu_addr);
4411                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4412                     break;
4413                 case 0xa:       /* ldsh, load signed halfword */
4414                     gen_address_mask(dc, cpu_addr);
4415                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4416                     break;
4417                 case 0xd:       /* ldstub -- XXX: should be atomic */
4418                     {
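                              /* ldstub: rd <- zero-extended byte at [addr],
                                 then the memory byte is set to 0xff
                                 (test-and-set).  Load and store are emitted
                                 separately here, hence the atomicity XXX
                                 above. */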
4419                         TCGv r_const;
4420
4421                         gen_address_mask(dc, cpu_addr);
4422                         tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4423                         r_const = tcg_const_tl(0xff);
4424                         tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4425                         tcg_temp_free(r_const);
4426                     }
4427                     break;
4428                 case 0x0f:      /* swap, swap register with memory --
4429                                    XXX: should also be atomic */
4430                     CHECK_IU_FEATURE(dc, SWAP);
4431                     gen_movl_reg_TN(rd, cpu_val);
4432                     gen_address_mask(dc, cpu_addr);
4433                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4434                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4435                     tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4436                     break;
4437 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4438                 case 0x10:      /* lda, V9 lduwa, load word alternate */
4439 #ifndef TARGET_SPARC64
4440                     if (IS_IMM)
4441                         goto illegal_insn;
4442                     if (!supervisor(dc))
4443                         goto priv_insn;
4444 #endif
4445                     save_state(dc, cpu_cond);
4446                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4447                     break;
4448                 case 0x11:      /* lduba, load unsigned byte alternate */
4449 #ifndef TARGET_SPARC64
4450                     if (IS_IMM)
4451                         goto illegal_insn;
4452                     if (!supervisor(dc))
4453                         goto priv_insn;
4454 #endif
4455                     save_state(dc, cpu_cond);
4456                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4457                     break;
4458                 case 0x12:      /* lduha, load unsigned halfword alternate */
4459 #ifndef TARGET_SPARC64
4460                     if (IS_IMM)
4461                         goto illegal_insn;
4462                     if (!supervisor(dc))
4463                         goto priv_insn;
4464 #endif
4465                     save_state(dc, cpu_cond);
4466                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4467                     break;
4468                 case 0x13:      /* ldda, load double word alternate */
4469 #ifndef TARGET_SPARC64
4470                     if (IS_IMM)
4471                         goto illegal_insn;
4472                     if (!supervisor(dc))
4473                         goto priv_insn;
4474 #endif
4475                     if (rd & 1)
4476                         goto illegal_insn;
4477                     save_state(dc, cpu_cond);
4478                     gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4479                     goto skip_move;
4480                 case 0x19:      /* ldsba, load signed byte alternate */
4481 #ifndef TARGET_SPARC64
4482                     if (IS_IMM)
4483                         goto illegal_insn;
4484                     if (!supervisor(dc))
4485                         goto priv_insn;
4486 #endif
4487                     save_state(dc, cpu_cond);
4488                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4489                     break;
4490                 case 0x1a:      /* ldsha, load signed halfword alternate */
4491 #ifndef TARGET_SPARC64
4492                     if (IS_IMM)
4493                         goto illegal_insn;
4494                     if (!supervisor(dc))
4495                         goto priv_insn;
4496 #endif
4497                     save_state(dc, cpu_cond);
4498                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4499                     break;
4500                 case 0x1d:      /* ldstuba -- XXX: should be atomic */
4501 #ifndef TARGET_SPARC64
4502                     if (IS_IMM)
4503                         goto illegal_insn;
4504                     if (!supervisor(dc))
4505                         goto priv_insn;
4506 #endif
4507                     save_state(dc, cpu_cond);
4508                     gen_ldstub_asi(cpu_val, cpu_addr, insn);
4509                     break;
4510                 case 0x1f:      /* swapa, swap reg with alt. memory --
4511                                    XXX: should also be atomic */
4512                     CHECK_IU_FEATURE(dc, SWAP);
4513 #ifndef TARGET_SPARC64
4514                     if (IS_IMM)
4515                         goto illegal_insn;
4516                     if (!supervisor(dc))
4517                         goto priv_insn;
4518 #endif
4519                     save_state(dc, cpu_cond);
4520                     gen_movl_reg_TN(rd, cpu_val);
4521                     gen_swap_asi(cpu_val, cpu_addr, insn);
4522                     break;
4523
4524 #ifndef TARGET_SPARC64
4525                 case 0x30: /* ldc */
4526                 case 0x31: /* ldcsr */
4527                 case 0x33: /* lddc */
4528                     goto ncp_insn;
4529 #endif
4530 #endif
4531 #ifdef TARGET_SPARC64
4532                 case 0x08: /* V9 ldsw */
4533                     gen_address_mask(dc, cpu_addr);
4534                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4535                     break;
4536                 case 0x0b: /* V9 ldx */
4537                     gen_address_mask(dc, cpu_addr);
4538                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4539                     break;
4540                 case 0x18: /* V9 ldswa */
4541                     save_state(dc, cpu_cond);
4542                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4543                     break;
4544                 case 0x1b: /* V9 ldxa */
4545                     save_state(dc, cpu_cond);
4546                     gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4547                     break;
4548                 case 0x2d: /* V9 prefetch, no effect */
4549                     goto skip_move;
4550                 case 0x30: /* V9 ldfa */
4551                     save_state(dc, cpu_cond);
4552                     gen_ldf_asi(cpu_addr, insn, 4, rd);
4553                     goto skip_move;
4554                 case 0x33: /* V9 lddfa */
4555                     save_state(dc, cpu_cond);
4556                     gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4557                     goto skip_move;
4558                 case 0x3d: /* V9 prefetcha, no effect */
4559                     goto skip_move;
4560                 case 0x32: /* V9 ldqfa */
4561                     CHECK_FPU_FEATURE(dc, FLOAT128);
4562                     save_state(dc, cpu_cond);
4563                     gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4564                     goto skip_move;
4565 #endif
4566                 default:
4567                     goto illegal_insn;
4568                 }
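                     /* Common write-back of the loaded value to rd.  Cases that
                        write their destination themselves (ldda and the ASI/FP
                        loads) or have none (prefetch) jump to skip_move
                        instead. */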
4569                 gen_movl_TN_reg(rd, cpu_val);
4570 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4571             skip_move: ;
4572 #endif
4573             } else if (xop >= 0x20 && xop < 0x24) {
4574                 if (gen_trap_ifnofpu(dc, cpu_cond))
4575                     goto jmp_insn;
4576                 save_state(dc, cpu_cond);
4577                 switch (xop) {
4578                 case 0x20:      /* ldf, load fpreg */
4579                     gen_address_mask(dc, cpu_addr);
4580                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4581                     tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4582                     break;
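                 /* ldfsr/ldxfsr: on sparc64, rd == 1 selects the 64-bit
                    ldxfsr; otherwise the common 32-bit ldfsr path below is
                    used. */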
4583                 case 0x21:      /* ldfsr, V9 ldxfsr */
4584 #ifdef TARGET_SPARC64
4585                     gen_address_mask(dc, cpu_addr);
4586                     if (rd == 1) {
4587                         tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4588                         gen_helper_ldxfsr(cpu_tmp64);
4589                         break;
4590                     }
4591 #endif
4592                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4593                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4594                     gen_helper_ldfsr(cpu_tmp32);
4596                     break;
4597                 case 0x22:      /* ldqf, load quad fpreg */
4598                     {
4599                         TCGv_i32 r_const;
4600
4601                         CHECK_FPU_FEATURE(dc, FLOAT128);
4602                         r_const = tcg_const_i32(dc->mem_idx);
4603                         gen_helper_ldqf(cpu_addr, r_const);
4604                         tcg_temp_free_i32(r_const);
4605                         gen_op_store_QT0_fpr(QFPREG(rd));
4606                     }
4607                     break;
4608                 case 0x23:      /* lddf, load double fpreg */
4609                     {
4610                         TCGv_i32 r_const;
4611
4612                         r_const = tcg_const_i32(dc->mem_idx);
4613                         gen_helper_lddf(cpu_addr, r_const);
4614                         tcg_temp_free_i32(r_const);
4615                         gen_op_store_DT0_fpr(DFPREG(rd));
4616                     }
4617                     break;
4618                 default:
4619                     goto illegal_insn;
4620                 }
4621             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4622                        xop == 0xe || xop == 0x1e) {
4623                 gen_movl_reg_TN(rd, cpu_val);
4624                 switch (xop) {
4625                 case 0x4: /* st, store word */
4626                     gen_address_mask(dc, cpu_addr);
4627                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4628                     break;
4629                 case 0x5: /* stb, store byte */
4630                     gen_address_mask(dc, cpu_addr);
4631                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4632                     break;
4633                 case 0x6: /* sth, store halfword */
4634                     gen_address_mask(dc, cpu_addr);
4635                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4636                     break;
4637                 case 0x7: /* std, store double word */
4638                     if (rd & 1)
4639                         goto illegal_insn;
4640                     else {
4641                         TCGv_i32 r_const;
4642
4643                         save_state(dc, cpu_cond);
4644                         gen_address_mask(dc, cpu_addr);
4645                         r_const = tcg_const_i32(7);
4646                         gen_helper_check_align(cpu_addr, r_const); // XXX remove
4647                         tcg_temp_free_i32(r_const);
4648                         gen_movl_reg_TN(rd + 1, cpu_tmp0);
4649                         tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4650                         tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4651                     }
4652                     break;
4653 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4654                 case 0x14: /* sta, V9 stwa, store word alternate */
4655 #ifndef TARGET_SPARC64
4656                     if (IS_IMM)
4657                         goto illegal_insn;
4658                     if (!supervisor(dc))
4659                         goto priv_insn;
4660 #endif
4661                     save_state(dc, cpu_cond);
4662                     gen_st_asi(cpu_val, cpu_addr, insn, 4);
4663                     break;
4664                 case 0x15: /* stba, store byte alternate */
4665 #ifndef TARGET_SPARC64
4666                     if (IS_IMM)
4667                         goto illegal_insn;
4668                     if (!supervisor(dc))
4669                         goto priv_insn;
4670 #endif
4671                     save_state(dc, cpu_cond);
4672                     gen_st_asi(cpu_val, cpu_addr, insn, 1);
4673                     break;
4674                 case 0x16: /* stha, store halfword alternate */
4675 #ifndef TARGET_SPARC64
4676                     if (IS_IMM)
4677                         goto illegal_insn;
4678                     if (!supervisor(dc))
4679                         goto priv_insn;
4680 #endif
4681                     save_state(dc, cpu_cond);
4682                     gen_st_asi(cpu_val, cpu_addr, insn, 2);
4683                     break;
4684                 case 0x17: /* stda, store double word alternate */
4685 #ifndef TARGET_SPARC64
4686                     if (IS_IMM)
4687                         goto illegal_insn;
4688                     if (!supervisor(dc))
4689                         goto priv_insn;
4690 #endif
4691                     if (rd & 1)
4692                         goto illegal_insn;
4693                     else {
4694                         save_state(dc, cpu_cond);
4695                         gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4696                     }
4697                     break;
4698 #endif
4699 #ifdef TARGET_SPARC64
4700                 case 0x0e: /* V9 stx */
4701                     gen_address_mask(dc, cpu_addr);
4702                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4703                     break;
4704                 case 0x1e: /* V9 stxa */
4705                     save_state(dc, cpu_cond);
4706                     gen_st_asi(cpu_val, cpu_addr, insn, 8);
4707                     break;
4708 #endif
4709                 default:
4710                     goto illegal_insn;
4711                 }
4712             } else if (xop > 0x23 && xop < 0x28) {
4713                 if (gen_trap_ifnofpu(dc, cpu_cond))
4714                     goto jmp_insn;
4715                 save_state(dc, cpu_cond);
4716                 switch (xop) {
4717                 case 0x24: /* stf, store fpreg */
4718                     gen_address_mask(dc, cpu_addr);
4719                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4720                     tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4721                     break;
4722                 case 0x25: /* stfsr, V9 stxfsr */
4723 #ifdef TARGET_SPARC64
4724                     gen_address_mask(dc, cpu_addr);
4725                     tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4726                     if (rd == 1)
4727                         tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4728                     else
4729                         tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4730 #else
4731                     tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4732                     tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4733 #endif
4734                     break;
4735                 case 0x26:
4736 #ifdef TARGET_SPARC64
4737                     /* V9 stqf, store quad fpreg */
4738                     {
4739                         TCGv_i32 r_const;
4740
4741                         CHECK_FPU_FEATURE(dc, FLOAT128);
4742                         gen_op_load_fpr_QT0(QFPREG(rd));
4743                         r_const = tcg_const_i32(dc->mem_idx);
4744                         gen_helper_stqf(cpu_addr, r_const);
4745                         tcg_temp_free_i32(r_const);
4746                     }
4747                     break;
4748 #else /* !TARGET_SPARC64 */
4749                     /* stdfq, store floating point queue */
4750 #if defined(CONFIG_USER_ONLY)
4751                     goto illegal_insn;
4752 #else
4753                     if (!supervisor(dc))
4754                         goto priv_insn;
4755                     if (gen_trap_ifnofpu(dc, cpu_cond))
4756                         goto jmp_insn;
4757                     goto nfq_insn;
4758 #endif
4759 #endif
4760                 case 0x27: /* stdf, store double fpreg */
4761                     {
4762                         TCGv_i32 r_const;
4763
4764                         gen_op_load_fpr_DT0(DFPREG(rd));
4765                         r_const = tcg_const_i32(dc->mem_idx);
4766                         gen_helper_stdf(cpu_addr, r_const);
4767                         tcg_temp_free_i32(r_const);
4768                     }
4769                     break;
4770                 default:
4771                     goto illegal_insn;
4772                 }
4773             } else if (xop > 0x33 && xop < 0x3f) {
4774                 save_state(dc, cpu_cond);
4775                 switch (xop) {
4776 #ifdef TARGET_SPARC64
4777                 case 0x34: /* V9 stfa */
4778                     gen_stf_asi(cpu_addr, insn, 4, rd);
4779                     break;
4780                 case 0x36: /* V9 stqfa */
4781                     {
4782                         TCGv_i32 r_const;
4783
4784                         CHECK_FPU_FEATURE(dc, FLOAT128);
4785                         r_const = tcg_const_i32(7);
4786                         gen_helper_check_align(cpu_addr, r_const);
4787                         tcg_temp_free_i32(r_const);
4788                         gen_op_load_fpr_QT0(QFPREG(rd));
4789                         gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4790                     }
4791                     break;
4792                 case 0x37: /* V9 stdfa */
4793                     gen_op_load_fpr_DT0(DFPREG(rd));
4794                     gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4795                     break;
4796                 case 0x3c: /* V9 casa */
4797                     gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4798                     gen_movl_TN_reg(rd, cpu_val);
4799                     break;
4800                 case 0x3e: /* V9 casxa */
4801                     gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4802                     gen_movl_TN_reg(rd, cpu_val);
4803                     break;
4804 #else
4805                 case 0x34: /* stc */
4806                 case 0x35: /* stcsr */
4807                 case 0x36: /* stdcq */
4808                 case 0x37: /* stdc */
4809                     goto ncp_insn;
4810 #endif
4811                 default:
4812                     goto illegal_insn;
4813                 }
4814             } else
4815                 goto illegal_insn;
4816         }
4817         break;
4818     }
4819     /* default case for non jump instructions */
4820     if (dc->npc == DYNAMIC_PC) {
4821         dc->pc = DYNAMIC_PC;
4822         gen_op_next_insn();
4823     } else if (dc->npc == JUMP_PC) {
4824         /* we can do a static jump */
4825         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4826         dc->is_br = 1;
4827     } else {
4828         dc->pc = dc->npc;
4829         dc->npc = dc->npc + 4;
4830     }
4831  jmp_insn:
4832     return;
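     /* Exception exits: save_state() brings the architectural pc and npc up
        to date before the trap helper raises the exception, and is_br marks
        the TB as finished so no further code is generated for it. */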
4833  illegal_insn:
4834     {
4835         TCGv_i32 r_const;
4836
4837         save_state(dc, cpu_cond);
4838         r_const = tcg_const_i32(TT_ILL_INSN);
4839         gen_helper_raise_exception(r_const);
4840         tcg_temp_free_i32(r_const);
4841         dc->is_br = 1;
4842     }
4843     return;
4844  unimp_flush:
4845     {
4846         TCGv_i32 r_const;
4847
4848         save_state(dc, cpu_cond);
4849         r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4850         gen_helper_raise_exception(r_const);
4851         tcg_temp_free_i32(r_const);
4852         dc->is_br = 1;
4853     }
4854     return;
4855 #if !defined(CONFIG_USER_ONLY)
4856  priv_insn:
4857     {
4858         TCGv_i32 r_const;
4859
4860         save_state(dc, cpu_cond);
4861         r_const = tcg_const_i32(TT_PRIV_INSN);
4862         gen_helper_raise_exception(r_const);
4863         tcg_temp_free_i32(r_const);
4864         dc->is_br = 1;
4865     }
4866     return;
4867 #endif
4868  nfpu_insn:
4869     save_state(dc, cpu_cond);
4870     gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4871     dc->is_br = 1;
4872     return;
4873 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4874  nfq_insn:
4875     save_state(dc, cpu_cond);
4876     gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4877     dc->is_br = 1;
4878     return;
4879 #endif
4880 #ifndef TARGET_SPARC64
4881  ncp_insn:
4882     {
4883         TCGv_i32 r_const;
4884
4885         save_state(dc, cpu_cond);
4886         r_const = tcg_const_i32(TT_NCP_INSN);
4887         gen_helper_raise_exception(r_const);
4888         tcg_temp_free_i32(r_const);
4889         dc->is_br = 1;
4890     }
4891     return;
4892 #endif
4893 }
4894
4895 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4896                                                   int spc, CPUSPARCState *env)
4897 {
4898     target_ulong pc_start, last_pc;
4899     uint16_t *gen_opc_end;
4900     DisasContext dc1, *dc = &dc1;
4901     CPUBreakpoint *bp;
4902     int j, lj = -1;
4903     int num_insns;
4904     int max_insns;
4905
4906     memset(dc, 0, sizeof(DisasContext));
4907     dc->tb = tb;
4908     pc_start = tb->pc;
4909     dc->pc = pc_start;
4910     last_pc = dc->pc;
4911     dc->npc = (target_ulong) tb->cs_base;
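     /* On SPARC the npc of the entry point is part of the TB key and is
        passed in tb->cs_base, since delayed branches make (pc, npc) the real
        execution state. */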
4912     dc->cc_op = CC_OP_DYNAMIC;
4913     dc->mem_idx = cpu_mmu_index(env);
4914     dc->def = env->def;
4915     if ((dc->def->features & CPU_FEATURE_FLOAT))
4916         dc->fpu_enabled = cpu_fpu_enabled(env);
4917     else
4918         dc->fpu_enabled = 0;
4919 #ifdef TARGET_SPARC64
4920     dc->address_mask_32bit = env->pstate & PS_AM;
4921 #endif
4922     gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4923
4924     cpu_tmp0 = tcg_temp_new();
4925     cpu_tmp32 = tcg_temp_new_i32();
4926     cpu_tmp64 = tcg_temp_new_i64();
4927
4928     cpu_dst = tcg_temp_local_new();
4929
4930     // loads and stores
4931     cpu_val = tcg_temp_local_new();
4932     cpu_addr = tcg_temp_local_new();
4933
4934     num_insns = 0;
4935     max_insns = tb->cflags & CF_COUNT_MASK;
4936     if (max_insns == 0)
4937         max_insns = CF_COUNT_MASK;
4938     gen_icount_start();
4939     do {
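         /* Translate one instruction at a time until the TB has to end: a
            branch or exception (is_br), a non-sequential pc, a page boundary,
            single-stepping, a full opcode buffer, or max_insns reached. */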
4940         if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
4941             TAILQ_FOREACH(bp, &env->breakpoints, entry) {
4942                 if (bp->pc == dc->pc) {
4943                     if (dc->pc != pc_start)
4944                         save_state(dc, cpu_cond);
4945                     gen_helper_debug();
4946                     tcg_gen_exit_tb(0);
4947                     dc->is_br = 1;
4948                     goto exit_gen_loop;
4949                 }
4950             }
4951         }
4952         if (spc) {
4953             qemu_log("Search PC...\n");
4954             j = gen_opc_ptr - gen_opc_buf;
4955             if (lj < j) {
4956                 lj++;
4957                 while (lj < j)
4958                     gen_opc_instr_start[lj++] = 0;
4959                 gen_opc_pc[lj] = dc->pc;
4960                 gen_opc_npc[lj] = dc->npc;
4961                 gen_opc_instr_start[lj] = 1;
4962                 gen_opc_icount[lj] = num_insns;
4963             }
4964         }
4965         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4966             gen_io_start();
4967         last_pc = dc->pc;
4968         disas_sparc_insn(dc);
4969         num_insns++;
4970
4971         if (dc->is_br)
4972             break;
4973         /* if the next PC is different, we abort now */
4974         if (dc->pc != (last_pc + 4))
4975             break;
4976         /* if we reach a page boundary, we stop generation so that the
4977            PC of a TT_TFAULT exception is always in the right page */
4978         if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4979             break;
4980         /* if single step mode, we generate only one instruction and
4981            generate an exception */
4982         if (env->singlestep_enabled || singlestep) {
4983             tcg_gen_movi_tl(cpu_pc, dc->pc);
4984             tcg_gen_exit_tb(0);
4985             break;
4986         }
4987     } while ((gen_opc_ptr < gen_opc_end) &&
4988              (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4989              num_insns < max_insns);
4990
4991  exit_gen_loop:
4992     tcg_temp_free(cpu_addr);
4993     tcg_temp_free(cpu_val);
4994     tcg_temp_free(cpu_dst);
4995     tcg_temp_free_i64(cpu_tmp64);
4996     tcg_temp_free_i32(cpu_tmp32);
4997     tcg_temp_free(cpu_tmp0);
4998     if (tb->cflags & CF_LAST_IO)
4999         gen_io_end();
5000     if (!dc->is_br) {
5001         if (dc->pc != DYNAMIC_PC &&
5002             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5003             /* static PC and NPC: we can use direct chaining */
5004             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5005         } else {
5006             if (dc->pc != DYNAMIC_PC)
5007                 tcg_gen_movi_tl(cpu_pc, dc->pc);
5008             save_npc(dc, cpu_cond);
5009             tcg_gen_exit_tb(0);
5010         }
5011     }
5012     gen_icount_end(tb, num_insns);
5013     *gen_opc_ptr = INDEX_op_end;
5014     if (spc) {
5015         j = gen_opc_ptr - gen_opc_buf;
5016         lj++;
5017         while (lj <= j)
5018             gen_opc_instr_start[lj++] = 0;
5019 #if 0
5020         log_page_dump();
5021 #endif
5022         gen_opc_jump_pc[0] = dc->jump_pc[0];
5023         gen_opc_jump_pc[1] = dc->jump_pc[1];
5024     } else {
5025         tb->size = last_pc + 4 - pc_start;
5026         tb->icount = num_insns;
5027     }
5028 #ifdef DEBUG_DISAS
5029     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5030         qemu_log("--------------\n");
5031         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5032         log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
5033         qemu_log("\n");
5034     }
5035 #endif
5036 }
5037
5038 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5039 {
5040     gen_intermediate_code_internal(tb, 0, env);
5041 }
5042
5043 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5044 {
5045     gen_intermediate_code_internal(tb, 1, env);
5046 }
5047
5048 void gen_intermediate_code_init(CPUSPARCState *env)
5049 {
5050     unsigned int i;
5051     static int inited;
5052     static const char * const gregnames[8] = {
5053         NULL, // g0 not used
5054         "g1",
5055         "g2",
5056         "g3",
5057         "g4",
5058         "g5",
5059         "g6",
5060         "g7",
5061     };
5062     static const char * const fregnames[64] = {
5063         "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5064         "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5065         "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5066         "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5067         "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5068         "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5069         "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5070         "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5071     };
5072
5073     /* init various static tables */
5074     if (!inited) {
5075         inited = 1;
5076
5077         cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5078         cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5079                                              offsetof(CPUState, regwptr),
5080                                              "regwptr");
5081 #ifdef TARGET_SPARC64
5082         cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
5083                                          "xcc");
5084         cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5085                                          "asi");
5086         cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5087                                           "fprs");
5088         cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5089                                      "gsr");
5090         cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5091                                            offsetof(CPUState, tick_cmpr),
5092                                            "tick_cmpr");
5093         cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5094                                             offsetof(CPUState, stick_cmpr),
5095                                             "stick_cmpr");
5096         cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5097                                              offsetof(CPUState, hstick_cmpr),
5098                                              "hstick_cmpr");
5099         cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5100                                        "hintp");
5101         cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5102                                       "htba");
5103         cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5104                                       "hver");
5105         cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5106                                      offsetof(CPUState, ssr), "ssr");
5107         cpu_ver = tcg_global_mem_new(TCG_AREG0,
5108                                      offsetof(CPUState, version), "ver");
5109         cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5110                                              offsetof(CPUState, softint),
5111                                              "softint");
5112 #else
5113         cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
5114                                      "wim");
5115 #endif
5116         cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5117                                       "cond");
5118         cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5119                                         "cc_src");
5120         cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5121                                          offsetof(CPUState, cc_src2),
5122                                          "cc_src2");
5123         cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5124                                         "cc_dst");
5125         cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5126                                            "cc_op");
5127         cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5128                                          "psr");
5129         cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5130                                      "fsr");
5131         cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5132                                     "pc");
5133         cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5134                                      "npc");
5135         cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5136 #ifndef CONFIG_USER_ONLY
5137         cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5138                                      "tbr");
5139 #endif
5140         for (i = 1; i < 8; i++)
5141             cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5142                                               offsetof(CPUState, gregs[i]),
5143                                               gregnames[i]);
5144         for (i = 0; i < TARGET_FPREGS; i++)
5145             cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5146                                                 offsetof(CPUState, fpr[i]),
5147                                                 fregnames[i]);
5148
5149         /* register helpers */
5150
5151 #define GEN_HELPER 2
5152 #include "helper.h"
5153     }
5154 }
5155
5156 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5157                 unsigned long searched_pc, int pc_pos, void *puc)
5158 {
5159     target_ulong npc;
5160     env->pc = gen_opc_pc[pc_pos];
5161     npc = gen_opc_npc[pc_pos];
5162     if (npc == DYNAMIC_PC) {
5163         /* dynamic NPC: already stored */
5164     } else if (npc == JUMP_PC) {
5165         target_ulong t2 = (target_ulong)(unsigned long)puc;
5166         /* jump PC: use T2 and the jump targets of the translation */
5167         if (t2)
5168             env->npc = gen_opc_jump_pc[0];
5169         else
5170             env->npc = gen_opc_jump_pc[1];
5171     } else {
5172         env->npc = npc;
5173     }
5174 }