Convert sub
[qemu] / target-sparc / translate.c
1 /*
2    SPARC translation
3
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, write to the Free Software
19    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA  02110-1301 USA
20  */
21
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
27
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
33
34 #define GEN_HELPER 1
35 #include "helper.h"
36
37 #define DEBUG_DISAS
38
39 #define DYNAMIC_PC  1 /* dynamic pc value */
40 #define JUMP_PC     2 /* dynamic pc value limited to two possible values,
41                          jump_pc[0] or jump_pc[1], selected by the branch condition */
42
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst, cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
68
69 #include "gen-icount.h"
70
71 typedef struct DisasContext {
72     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
73     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
75     int is_br;
76     int mem_idx;
77     int fpu_enabled;
78     int address_mask_32bit;
79     uint32_t cc_op;  /* current CC operation */
80     struct TranslationBlock *tb;
81     sparc_def_t *def;
82 } DisasContext;
83
84 // This macro uses non-native bit order (bit 0 is the most significant bit)
85 #define GET_FIELD(X, FROM, TO)                                  \
86     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
87
88 // This macro uses the bit order of the manuals, i.e. bit 0 is 2^0 (the LSB)
89 #define GET_FIELD_SP(X, FROM, TO)               \
90     GET_FIELD(X, 31 - (TO), 31 - (FROM))
91
92 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
93 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
94
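The two extraction macros differ only in bit numbering: GET_FIELD counts from the most significant bit, while GET_FIELD_SP counts from the least significant bit as the architecture manuals do. A minimal standalone check of the equivalence, using the rs1 field (machine bits 18..14) of an example format-3 encoding:

    #include <assert.h>
    #include <stdint.h>

    /* same definitions as above */
    #define GET_FIELD(X, FROM, TO)                                  \
        ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
    #define GET_FIELD_SP(X, FROM, TO)               \
        GET_FIELD(X, 31 - (TO), 31 - (FROM))

    int main(void)
    {
        uint32_t insn = 0x86004009;  /* add %g1, %o1, %g3 */

        /* rs1 is machine bits 18..14: 13..17 MSB-first, 14..18 LSB-first */
        assert(GET_FIELD(insn, 13, 17) == ((insn >> 14) & 0x1f));
        assert(GET_FIELD_SP(insn, 14, 18) == GET_FIELD(insn, 13, 17));
        assert(GET_FIELD(insn, 13, 17) == 1);  /* %g1 */
        return 0;
    }
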
95 #ifdef TARGET_SPARC64
96 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
97 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
98 #else
99 #define DFPREG(r) (r & 0x1e)
100 #define QFPREG(r) (r & 0x1c)
101 #endif
102
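/*
 * On SPARC64 the 5-bit register field can also name the upper floating
 * point bank: for double/quad operands the odd low bit of the field
 * encodes registers above %f30, which DFPREG/QFPREG fold into bit 5 of
 * the index into cpu_fpr[] (an array of 32-bit halves).  For example
 * DFPREG(2) = 2 (%f2), DFPREG(1) = 32 (%f32), DFPREG(3) = 34 (%f34),
 * QFPREG(7) = 36 (%f36).
 */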
103 #define UA2005_HTRAP_MASK 0xff
104 #define V8_TRAP_MASK 0x7f
105
106 static int sign_extend(int x, int len)
107 {
108     len = 32 - len;
109     return (x << len) >> len;
110 }
111
112 #define IS_IMM (insn & (1<<13))
113
114 /* floating point register moves */
115 static void gen_op_load_fpr_DT0(unsigned int src)
116 {
117     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
118                    offsetof(CPU_DoubleU, l.upper));
119     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
120                    offsetof(CPU_DoubleU, l.lower));
121 }
122
123 static void gen_op_load_fpr_DT1(unsigned int src)
124 {
125     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
126                    offsetof(CPU_DoubleU, l.upper));
127     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
128                    offsetof(CPU_DoubleU, l.lower));
129 }
130
131 static void gen_op_store_DT0_fpr(unsigned int dst)
132 {
133     tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
134                    offsetof(CPU_DoubleU, l.upper));
135     tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
136                    offsetof(CPU_DoubleU, l.lower));
137 }
138
139 static void gen_op_load_fpr_QT0(unsigned int src)
140 {
141     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
142                    offsetof(CPU_QuadU, l.upmost));
143     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
144                    offsetof(CPU_QuadU, l.upper));
145     tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
146                    offsetof(CPU_QuadU, l.lower));
147     tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
148                    offsetof(CPU_QuadU, l.lowest));
149 }
150
151 static void gen_op_load_fpr_QT1(unsigned int src)
152 {
153     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
154                    offsetof(CPU_QuadU, l.upmost));
155     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
156                    offsetof(CPU_QuadU, l.upper));
157     tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
158                    offsetof(CPU_QuadU, l.lower));
159     tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
160                    offsetof(CPU_QuadU, l.lowest));
161 }
162
163 static void gen_op_store_QT0_fpr(unsigned int dst)
164 {
165     tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
166                    offsetof(CPU_QuadU, l.upmost));
167     tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
168                    offsetof(CPU_QuadU, l.upper));
169     tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
170                    offsetof(CPU_QuadU, l.lower));
171     tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
172                    offsetof(CPU_QuadU, l.lowest));
173 }
174
175 /* moves */
176 #ifdef CONFIG_USER_ONLY
177 #define supervisor(dc) 0
178 #ifdef TARGET_SPARC64
179 #define hypervisor(dc) 0
180 #endif
181 #else
182 #define supervisor(dc) (dc->mem_idx >= 1)
183 #ifdef TARGET_SPARC64
184 #define hypervisor(dc) (dc->mem_idx == 2)
185 #else
186 #endif
187 #endif
188
189 #ifdef TARGET_SPARC64
190 #ifndef TARGET_ABI32
191 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
192 #else
193 #define AM_CHECK(dc) (1)
194 #endif
195 #endif
196
197 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
198 {
199 #ifdef TARGET_SPARC64
200     if (AM_CHECK(dc))
201         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
202 #endif
203 }
204
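/*
 * On SPARC64 a 32-bit address mask may be in effect (typically driven by
 * PSTATE.AM, and always for the 32-bit user ABI), in which case every
 * generated address is truncated to 32 bits before use; on 32-bit targets
 * gen_address_mask is a no-op.
 */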
205 static inline void gen_movl_reg_TN(int reg, TCGv tn)
206 {
207     if (reg == 0)
208         tcg_gen_movi_tl(tn, 0);
209     else if (reg < 8)
210         tcg_gen_mov_tl(tn, cpu_gregs[reg]);
211     else {
212         tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
213     }
214 }
215
216 static inline void gen_movl_TN_reg(int reg, TCGv tn)
217 {
218     if (reg == 0)
219         return;
220     else if (reg < 8)
221         tcg_gen_mov_tl(cpu_gregs[reg], tn);
222     else {
223         tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
224     }
225 }
226
227 static inline void gen_goto_tb(DisasContext *s, int tb_num,
228                                target_ulong pc, target_ulong npc)
229 {
230     TranslationBlock *tb;
231
232     tb = s->tb;
233     if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
234         (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK))  {
235         /* jump to same page: we can use a direct jump */
236         tcg_gen_goto_tb(tb_num);
237         tcg_gen_movi_tl(cpu_pc, pc);
238         tcg_gen_movi_tl(cpu_npc, npc);
239         tcg_gen_exit_tb((long)tb + tb_num);
240     } else {
241         /* jump to another page: currently not optimized */
242         tcg_gen_movi_tl(cpu_pc, pc);
243         tcg_gen_movi_tl(cpu_npc, npc);
244         tcg_gen_exit_tb(0);
245     }
246 }
247
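/*
 * Direct translation-block chaining is only used when both pc and npc stay
 * on the same guest page as the current TB, since the mapping may differ
 * across a page boundary; tcg_gen_exit_tb((long)tb + tb_num) lets the
 * execution loop patch and chain the jump, while exit_tb(0) forces a full
 * TB lookup.
 */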
248 // XXX suboptimal
249 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
250 {
251     tcg_gen_extu_i32_tl(reg, src);
252     tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
253     tcg_gen_andi_tl(reg, reg, 0x1);
254 }
255
256 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
257 {
258     tcg_gen_extu_i32_tl(reg, src);
259     tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
260     tcg_gen_andi_tl(reg, reg, 0x1);
261 }
262
263 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
264 {
265     tcg_gen_extu_i32_tl(reg, src);
266     tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
267     tcg_gen_andi_tl(reg, reg, 0x1);
268 }
269
270 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
271 {
272     tcg_gen_extu_i32_tl(reg, src);
273     tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
274     tcg_gen_andi_tl(reg, reg, 0x1);
275 }
276
277 static inline void gen_cc_clear_icc(void)
278 {
279     tcg_gen_movi_i32(cpu_psr, 0);
280 }
281
282 #ifdef TARGET_SPARC64
283 static inline void gen_cc_clear_xcc(void)
284 {
285     tcg_gen_movi_i32(cpu_xcc, 0);
286 }
287 #endif
288
289 /* old op:
290     if (!T0)
291         env->psr |= PSR_ZERO;
292     if ((int32_t) T0 < 0)
293         env->psr |= PSR_NEG;
294 */
295 static inline void gen_cc_NZ_icc(TCGv dst)
296 {
297     TCGv r_temp;
298     int l1, l2;
299
300     l1 = gen_new_label();
301     l2 = gen_new_label();
302     r_temp = tcg_temp_new();
303     tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
304     tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
305     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
306     gen_set_label(l1);
307     tcg_gen_ext32s_tl(r_temp, dst);
308     tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
309     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
310     gen_set_label(l2);
311     tcg_temp_free(r_temp);
312 }
313
314 #ifdef TARGET_SPARC64
315 static inline void gen_cc_NZ_xcc(TCGv dst)
316 {
317     int l1, l2;
318
319     l1 = gen_new_label();
320     l2 = gen_new_label();
321     tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
322     tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
323     gen_set_label(l1);
324     tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
325     tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
326     gen_set_label(l2);
327 }
328 #endif
329
330 /* old op:
331     if (T0 < src1)
332         env->psr |= PSR_CARRY;
333 */
334 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
335 {
336     TCGv r_temp1, r_temp2;
337     int l1;
338
339     l1 = gen_new_label();
340     r_temp1 = tcg_temp_new();
341     r_temp2 = tcg_temp_new();
342     tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
343     tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
344     tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
345     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
346     gen_set_label(l1);
347     tcg_temp_free(r_temp1);
348     tcg_temp_free(r_temp2);
349 }
350
351 #ifdef TARGET_SPARC64
352 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
353 {
354     int l1;
355
356     l1 = gen_new_label();
357     tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
358     tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
359     gen_set_label(l1);
360 }
361 #endif
362
363 /* old op:
364     if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
365         env->psr |= PSR_OVF;
366 */
367 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
368 {
369     TCGv r_temp;
370
371     r_temp = tcg_temp_new();
372     tcg_gen_xor_tl(r_temp, src1, src2);
373     tcg_gen_not_tl(r_temp, r_temp);
374     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
375     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
376     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
377     tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
378     tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
379     tcg_temp_free(r_temp);
380     tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
381 }
382
383 #ifdef TARGET_SPARC64
384 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
385 {
386     TCGv r_temp;
387
388     r_temp = tcg_temp_new();
389     tcg_gen_xor_tl(r_temp, src1, src2);
390     tcg_gen_not_tl(r_temp, r_temp);
391     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
392     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
393     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
394     tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
395     tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
396     tcg_temp_free(r_temp);
397     tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
398 }
399 #endif
400
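gen_cc_V_add_icc/xcc implement the usual two's-complement rule: an addition overflows exactly when both operands have the same sign and the result has the opposite sign. A small host-side sketch of the same predicate on plain 32-bit integers (illustration only, not part of the translator):

    #include <assert.h>
    #include <stdint.h>

    /* Mirrors gen_cc_V_add_icc: bit 31 of ((~(src1 ^ src2)) & (src1 ^ dst)) */
    static int add_overflows_32(uint32_t src1, uint32_t src2)
    {
        uint32_t dst = src1 + src2;
        return (~(src1 ^ src2) & (src1 ^ dst)) >> 31;
    }

    int main(void)
    {
        assert(add_overflows_32(0x7fffffffu, 1u) == 1);          /* INT_MAX + 1  */
        assert(add_overflows_32(0xffffffffu, 1u) == 0);          /* -1 + 1       */
        assert(add_overflows_32(0x80000000u, 0x80000000u) == 1); /* INT_MIN * 2  */
        return 0;
    }
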
401 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
402 {
403     TCGv r_temp;
404     TCGv_i32 r_const;
405     int l1;
406
407     l1 = gen_new_label();
408
409     r_temp = tcg_temp_new();
410     tcg_gen_xor_tl(r_temp, src1, src2);
411     tcg_gen_not_tl(r_temp, r_temp);
412     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
413     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
414     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
415     tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
416     r_const = tcg_const_i32(TT_TOVF);
417     gen_helper_raise_exception(r_const);
418     tcg_temp_free_i32(r_const);
419     gen_set_label(l1);
420     tcg_temp_free(r_temp);
421 }
422
423 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
424 {
425     int l1;
426
427     l1 = gen_new_label();
428     tcg_gen_or_tl(cpu_tmp0, src1, src2);
429     tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
430     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
431     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
432     gen_set_label(l1);
433 }
434
435 static inline void gen_tag_tv(TCGv src1, TCGv src2)
436 {
437     int l1;
438     TCGv_i32 r_const;
439
440     l1 = gen_new_label();
441     tcg_gen_or_tl(cpu_tmp0, src1, src2);
442     tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
443     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
444     r_const = tcg_const_i32(TT_TOVF);
445     gen_helper_raise_exception(r_const);
446     tcg_temp_free_i32(r_const);
447     gen_set_label(l1);
448 }
449
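/*
 * Tagged arithmetic (TADDcc/TSUBcc and their trapping TV forms) expects a
 * clear 2-bit tag in both operands: gen_cc_V_tag merely sets PSR.V when
 * either operand has a nonzero tag, while gen_tag_tv raises a tag overflow
 * trap (TT_TOVF) instead.
 */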
450 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
451 {
452     tcg_gen_mov_tl(cpu_cc_src, src1);
453     tcg_gen_movi_tl(cpu_cc_src2, src2);
454     tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
455     tcg_gen_mov_tl(dst, cpu_cc_dst);
456 }
457
458 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
459 {
460     tcg_gen_mov_tl(cpu_cc_src, src1);
461     tcg_gen_mov_tl(cpu_cc_src2, src2);
462     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
463     tcg_gen_mov_tl(dst, cpu_cc_dst);
464 }
465
466 static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
467 {
468     tcg_gen_mov_tl(cpu_cc_src, src1);
469     tcg_gen_movi_tl(cpu_cc_src2, src2);
470     gen_mov_reg_C(cpu_tmp0, cpu_psr);
471     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
472     tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
473     tcg_gen_mov_tl(dst, cpu_cc_dst);
474 }
475
476 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
477 {
478     tcg_gen_mov_tl(cpu_cc_src, src1);
479     tcg_gen_mov_tl(cpu_cc_src2, src2);
480     gen_mov_reg_C(cpu_tmp0, cpu_psr);
481     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
482     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
483     tcg_gen_mov_tl(dst, cpu_cc_dst);
484 }
485
486 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
487 {
488     tcg_gen_mov_tl(cpu_cc_src, src1);
489     tcg_gen_mov_tl(cpu_cc_src2, src2);
490     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
491     gen_cc_clear_icc();
492     gen_cc_NZ_icc(cpu_cc_dst);
493     gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
494     gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
495     gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
496 #ifdef TARGET_SPARC64
497     gen_cc_clear_xcc();
498     gen_cc_NZ_xcc(cpu_cc_dst);
499     gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
500     gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
501 #endif
502     tcg_gen_mov_tl(dst, cpu_cc_dst);
503 }
504
505 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
506 {
507     tcg_gen_mov_tl(cpu_cc_src, src1);
508     tcg_gen_mov_tl(cpu_cc_src2, src2);
509     gen_tag_tv(cpu_cc_src, cpu_cc_src2);
510     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
511     gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
512     gen_cc_clear_icc();
513     gen_cc_NZ_icc(cpu_cc_dst);
514     gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
515 #ifdef TARGET_SPARC64
516     gen_cc_clear_xcc();
517     gen_cc_NZ_xcc(cpu_cc_dst);
518     gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
519     gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
520 #endif
521     tcg_gen_mov_tl(dst, cpu_cc_dst);
522 }
523
524 /* old op:
525     if (src1 < T1)
526         env->psr |= PSR_CARRY;
527 */
528 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
529 {
530     TCGv r_temp1, r_temp2;
531     int l1;
532
533     l1 = gen_new_label();
534     r_temp1 = tcg_temp_new();
535     r_temp2 = tcg_temp_new();
536     tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
537     tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
538     tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
539     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
540     gen_set_label(l1);
541     tcg_temp_free(r_temp1);
542     tcg_temp_free(r_temp2);
543 }
544
545 #ifdef TARGET_SPARC64
546 static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
547 {
548     int l1;
549
550     l1 = gen_new_label();
551     tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
552     tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
553     gen_set_label(l1);
554 }
555 #endif
556
557 /* old op:
558     if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
559         env->psr |= PSR_OVF;
560 */
561 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
562 {
563     TCGv r_temp;
564
565     r_temp = tcg_temp_new();
566     tcg_gen_xor_tl(r_temp, src1, src2);
567     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
568     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
569     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
570     tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
571     tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
572     tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
573     tcg_temp_free(r_temp);
574 }
575
576 #ifdef TARGET_SPARC64
577 static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
578 {
579     TCGv r_temp;
580
581     r_temp = tcg_temp_new();
582     tcg_gen_xor_tl(r_temp, src1, src2);
583     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
584     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
585     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
586     tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
587     tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
588     tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
589     tcg_temp_free(r_temp);
590 }
591 #endif
592
593 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
594 {
595     TCGv r_temp;
596     TCGv_i32 r_const;
597     int l1;
598
599     l1 = gen_new_label();
600
601     r_temp = tcg_temp_new();
602     tcg_gen_xor_tl(r_temp, src1, src2);
603     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
604     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
605     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
606     tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
607     r_const = tcg_const_i32(TT_TOVF);
608     gen_helper_raise_exception(r_const);
609     tcg_temp_free_i32(r_const);
610     gen_set_label(l1);
611     tcg_temp_free(r_temp);
612 }
613
614 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
615 {
616     tcg_gen_mov_tl(cpu_cc_src, src1);
617     tcg_gen_movi_tl(cpu_cc_src2, src2);
618     if (src2 == 0) {
619         tcg_gen_mov_tl(cpu_cc_dst, src1);
620         tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
621         dc->cc_op = CC_OP_LOGIC;
622     } else {
623         tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
624         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
625         dc->cc_op = CC_OP_SUB;
626     }
627     tcg_gen_mov_tl(dst, cpu_cc_dst);
628 }
629
630 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
631 {
632     tcg_gen_mov_tl(cpu_cc_src, src1);
633     tcg_gen_mov_tl(cpu_cc_src2, src2);
634     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
635     tcg_gen_mov_tl(dst, cpu_cc_dst);
636 }
637
638 static inline void gen_op_subx_cc2(TCGv dst)
639 {
640     gen_cc_NZ_icc(cpu_cc_dst);
641     gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
642     gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
643 #ifdef TARGET_SPARC64
644     gen_cc_NZ_xcc(cpu_cc_dst);
645     gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
646     gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
647 #endif
648     tcg_gen_mov_tl(dst, cpu_cc_dst);
649 }
650
651 static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
652 {
653     tcg_gen_mov_tl(cpu_cc_src, src1);
654     tcg_gen_movi_tl(cpu_cc_src2, src2);
655     gen_mov_reg_C(cpu_tmp0, cpu_psr);
656     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
657     gen_cc_clear_icc();
658     gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
659 #ifdef TARGET_SPARC64
660     gen_cc_clear_xcc();
661     gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
662 #endif
663     tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
664     gen_op_subx_cc2(dst);
665 }
666
667 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
668 {
669     tcg_gen_mov_tl(cpu_cc_src, src1);
670     tcg_gen_mov_tl(cpu_cc_src2, src2);
671     gen_mov_reg_C(cpu_tmp0, cpu_psr);
672     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
673     gen_cc_clear_icc();
674     gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
675 #ifdef TARGET_SPARC64
676     gen_cc_clear_xcc();
677     gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
678 #endif
679     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
680     gen_op_subx_cc2(dst);
681 }
682
683 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
684 {
685     tcg_gen_mov_tl(cpu_cc_src, src1);
686     tcg_gen_mov_tl(cpu_cc_src2, src2);
687     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
688     gen_cc_clear_icc();
689     gen_cc_NZ_icc(cpu_cc_dst);
690     gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
691     gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
692     gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
693 #ifdef TARGET_SPARC64
694     gen_cc_clear_xcc();
695     gen_cc_NZ_xcc(cpu_cc_dst);
696     gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
697     gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
698 #endif
699     tcg_gen_mov_tl(dst, cpu_cc_dst);
700 }
701
702 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
703 {
704     tcg_gen_mov_tl(cpu_cc_src, src1);
705     tcg_gen_mov_tl(cpu_cc_src2, src2);
706     gen_tag_tv(cpu_cc_src, cpu_cc_src2);
707     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
708     gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
709     gen_cc_clear_icc();
710     gen_cc_NZ_icc(cpu_cc_dst);
711     gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
712 #ifdef TARGET_SPARC64
713     gen_cc_clear_xcc();
714     gen_cc_NZ_xcc(cpu_cc_dst);
715     gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
716     gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
717 #endif
718     tcg_gen_mov_tl(dst, cpu_cc_dst);
719 }
720
721 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
722 {
723     TCGv r_temp;
724     int l1;
725
726     l1 = gen_new_label();
727     r_temp = tcg_temp_new();
728
729     /* old op:
730     if (!(env->y & 1))
731         T1 = 0;
732     */
733     tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
734     tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
735     tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
736     tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
737     tcg_gen_movi_tl(cpu_cc_src2, 0);
738     gen_set_label(l1);
739
740     // b2 = T0 & 1;
741     // env->y = (b2 << 31) | (env->y >> 1);
742     tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
743     tcg_gen_shli_tl(r_temp, r_temp, 31);
744     tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
745     tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
746     tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
747     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
748
749     // b1 = N ^ V;
750     gen_mov_reg_N(cpu_tmp0, cpu_psr);
751     gen_mov_reg_V(r_temp, cpu_psr);
752     tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
753     tcg_temp_free(r_temp);
754
755     // T0 = (b1 << 31) | (T0 >> 1);
756     // src1 = T0;
757     tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
758     tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
759     tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
760
761     /* do addition and update flags */
762     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
763
764     gen_cc_clear_icc();
765     gen_cc_NZ_icc(cpu_cc_dst);
766     gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
767     gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
768     tcg_gen_mov_tl(dst, cpu_cc_dst);
769 }
770
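/*
 * MULScc is one step of the V8 iterative multiply: Y<0> decides whether
 * rs2 or zero is added, Y is shifted right with the old low bit of rs1
 * entering Y<31>, rs1 is shifted right with (N xor V) entering bit 31,
 * and the sum of that shifted value and the (possibly zeroed) rs2 becomes
 * the result, updating the icc flags as for ADDcc.
 */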
771 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
772 {
773     TCGv_i64 r_temp, r_temp2;
774
775     r_temp = tcg_temp_new_i64();
776     r_temp2 = tcg_temp_new_i64();
777
778     tcg_gen_extu_tl_i64(r_temp, src2);
779     tcg_gen_extu_tl_i64(r_temp2, src1);
780     tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
781
782     tcg_gen_shri_i64(r_temp, r_temp2, 32);
783     tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
784     tcg_temp_free_i64(r_temp);
785     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
786 #ifdef TARGET_SPARC64
787     tcg_gen_mov_i64(dst, r_temp2);
788 #else
789     tcg_gen_trunc_i64_tl(dst, r_temp2);
790 #endif
791     tcg_temp_free_i64(r_temp2);
792 }
793
794 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
795 {
796     TCGv_i64 r_temp, r_temp2;
797
798     r_temp = tcg_temp_new_i64();
799     r_temp2 = tcg_temp_new_i64();
800
801     tcg_gen_ext_tl_i64(r_temp, src2);
802     tcg_gen_ext_tl_i64(r_temp2, src1);
803     tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
804
805     tcg_gen_shri_i64(r_temp, r_temp2, 32);
806     tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
807     tcg_temp_free_i64(r_temp);
808     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
809 #ifdef TARGET_SPARC64
810     tcg_gen_mov_i64(dst, r_temp2);
811 #else
812     tcg_gen_trunc_i64_tl(dst, r_temp2);
813 #endif
814     tcg_temp_free_i64(r_temp2);
815 }
816
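Both UMUL and SMUL produce a full 64-bit product and copy its upper 32 bits into the Y register; on a 32-bit target rd receives only the low word, while on SPARC64 it receives the whole 64-bit value. A host-side sketch of the unsigned case (illustration only):

    #include <assert.h>
    #include <stdint.h>

    static uint64_t umul_with_y(uint32_t a, uint32_t b, uint32_t *y)
    {
        uint64_t prod = (uint64_t)a * b;
        *y = (uint32_t)(prod >> 32);   /* Y gets the high word */
        return prod;                   /* low word on V8, full value on V9 */
    }

    int main(void)
    {
        uint32_t y;
        uint64_t p = umul_with_y(0x80000000u, 2u, &y);
        assert(y == 1 && (uint32_t)p == 0);
        return 0;
    }
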
817 #ifdef TARGET_SPARC64
818 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
819 {
820     TCGv_i32 r_const;
821     int l1;
822
823     l1 = gen_new_label();
824     tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
825     r_const = tcg_const_i32(TT_DIV_ZERO);
826     gen_helper_raise_exception(r_const);
827     tcg_temp_free_i32(r_const);
828     gen_set_label(l1);
829 }
830
831 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
832 {
833     int l1, l2;
834
835     l1 = gen_new_label();
836     l2 = gen_new_label();
837     tcg_gen_mov_tl(cpu_cc_src, src1);
838     tcg_gen_mov_tl(cpu_cc_src2, src2);
839     gen_trap_ifdivzero_tl(cpu_cc_src2);
840     tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
841     tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
842     tcg_gen_movi_i64(dst, INT64_MIN);
843     tcg_gen_br(l2);
844     gen_set_label(l1);
845     tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
846     gen_set_label(l2);
847 }
848 #endif
849
850 static inline void gen_op_div_cc(TCGv dst)
851 {
852     int l1;
853
854     tcg_gen_mov_tl(cpu_cc_dst, dst);
855     gen_cc_clear_icc();
856     gen_cc_NZ_icc(cpu_cc_dst);
857     l1 = gen_new_label();
858     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
859     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
860     gen_set_label(l1);
861 }
862
863 // 1
864 static inline void gen_op_eval_ba(TCGv dst)
865 {
866     tcg_gen_movi_tl(dst, 1);
867 }
868
869 // Z
870 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
871 {
872     gen_mov_reg_Z(dst, src);
873 }
874
875 // Z | (N ^ V)
876 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
877 {
878     gen_mov_reg_N(cpu_tmp0, src);
879     gen_mov_reg_V(dst, src);
880     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
881     gen_mov_reg_Z(cpu_tmp0, src);
882     tcg_gen_or_tl(dst, dst, cpu_tmp0);
883 }
884
885 // N ^ V
886 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
887 {
888     gen_mov_reg_V(cpu_tmp0, src);
889     gen_mov_reg_N(dst, src);
890     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
891 }
892
893 // C | Z
894 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
895 {
896     gen_mov_reg_Z(cpu_tmp0, src);
897     gen_mov_reg_C(dst, src);
898     tcg_gen_or_tl(dst, dst, cpu_tmp0);
899 }
900
901 // C
902 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
903 {
904     gen_mov_reg_C(dst, src);
905 }
906
907 // V
908 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
909 {
910     gen_mov_reg_V(dst, src);
911 }
912
913 // 0
914 static inline void gen_op_eval_bn(TCGv dst)
915 {
916     tcg_gen_movi_tl(dst, 0);
917 }
918
919 // N
920 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
921 {
922     gen_mov_reg_N(dst, src);
923 }
924
925 // !Z
926 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
927 {
928     gen_mov_reg_Z(dst, src);
929     tcg_gen_xori_tl(dst, dst, 0x1);
930 }
931
932 // !(Z | (N ^ V))
933 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
934 {
935     gen_mov_reg_N(cpu_tmp0, src);
936     gen_mov_reg_V(dst, src);
937     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
938     gen_mov_reg_Z(cpu_tmp0, src);
939     tcg_gen_or_tl(dst, dst, cpu_tmp0);
940     tcg_gen_xori_tl(dst, dst, 0x1);
941 }
942
943 // !(N ^ V)
944 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
945 {
946     gen_mov_reg_V(cpu_tmp0, src);
947     gen_mov_reg_N(dst, src);
948     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
949     tcg_gen_xori_tl(dst, dst, 0x1);
950 }
951
952 // !(C | Z)
953 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
954 {
955     gen_mov_reg_Z(cpu_tmp0, src);
956     gen_mov_reg_C(dst, src);
957     tcg_gen_or_tl(dst, dst, cpu_tmp0);
958     tcg_gen_xori_tl(dst, dst, 0x1);
959 }
960
961 // !C
962 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
963 {
964     gen_mov_reg_C(dst, src);
965     tcg_gen_xori_tl(dst, dst, 0x1);
966 }
967
968 // !N
969 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
970 {
971     gen_mov_reg_N(dst, src);
972     tcg_gen_xori_tl(dst, dst, 0x1);
973 }
974
975 // !V
976 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
977 {
978     gen_mov_reg_V(dst, src);
979     tcg_gen_xori_tl(dst, dst, 0x1);
980 }
981
982 /*
983   FSR bit field FCC1 | FCC0 (floating-point condition codes):
984    0 =
985    1 <
986    2 >
987    3 unordered
988 */
989 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
990                                     unsigned int fcc_offset)
991 {
992     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
993     tcg_gen_andi_tl(reg, reg, 0x1);
994 }
995
996 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
997                                     unsigned int fcc_offset)
998 {
999     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
1000     tcg_gen_andi_tl(reg, reg, 0x1);
1001 }
1002
1003 // !0: FCC0 | FCC1
1004 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
1005                                     unsigned int fcc_offset)
1006 {
1007     gen_mov_reg_FCC0(dst, src, fcc_offset);
1008     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1009     tcg_gen_or_tl(dst, dst, cpu_tmp0);
1010 }
1011
1012 // 1 or 2: FCC0 ^ FCC1
1013 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
1014                                     unsigned int fcc_offset)
1015 {
1016     gen_mov_reg_FCC0(dst, src, fcc_offset);
1017     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1018     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1019 }
1020
1021 // 1 or 3: FCC0
1022 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1023                                     unsigned int fcc_offset)
1024 {
1025     gen_mov_reg_FCC0(dst, src, fcc_offset);
1026 }
1027
1028 // 1: FCC0 & !FCC1
1029 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1030                                     unsigned int fcc_offset)
1031 {
1032     gen_mov_reg_FCC0(dst, src, fcc_offset);
1033     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1034     tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1035     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1036 }
1037
1038 // 2 or 3: FCC1
1039 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1040                                     unsigned int fcc_offset)
1041 {
1042     gen_mov_reg_FCC1(dst, src, fcc_offset);
1043 }
1044
1045 // 2: !FCC0 & FCC1
1046 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1047                                     unsigned int fcc_offset)
1048 {
1049     gen_mov_reg_FCC0(dst, src, fcc_offset);
1050     tcg_gen_xori_tl(dst, dst, 0x1);
1051     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1052     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1053 }
1054
1055 // 3: FCC0 & FCC1
1056 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1057                                     unsigned int fcc_offset)
1058 {
1059     gen_mov_reg_FCC0(dst, src, fcc_offset);
1060     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1061     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1062 }
1063
1064 // 0: !(FCC0 | FCC1)
1065 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1066                                     unsigned int fcc_offset)
1067 {
1068     gen_mov_reg_FCC0(dst, src, fcc_offset);
1069     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1070     tcg_gen_or_tl(dst, dst, cpu_tmp0);
1071     tcg_gen_xori_tl(dst, dst, 0x1);
1072 }
1073
1074 // 0 or 3: !(FCC0 ^ FCC1)
1075 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1076                                     unsigned int fcc_offset)
1077 {
1078     gen_mov_reg_FCC0(dst, src, fcc_offset);
1079     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1080     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1081     tcg_gen_xori_tl(dst, dst, 0x1);
1082 }
1083
1084 // 0 or 2: !FCC0
1085 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1086                                     unsigned int fcc_offset)
1087 {
1088     gen_mov_reg_FCC0(dst, src, fcc_offset);
1089     tcg_gen_xori_tl(dst, dst, 0x1);
1090 }
1091
1092 // !1: !(FCC0 & !FCC1)
1093 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1094                                     unsigned int fcc_offset)
1095 {
1096     gen_mov_reg_FCC0(dst, src, fcc_offset);
1097     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1098     tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1099     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1100     tcg_gen_xori_tl(dst, dst, 0x1);
1101 }
1102
1103 // 0 or 1: !FCC1
1104 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1105                                     unsigned int fcc_offset)
1106 {
1107     gen_mov_reg_FCC1(dst, src, fcc_offset);
1108     tcg_gen_xori_tl(dst, dst, 0x1);
1109 }
1110
1111 // !2: !(!FCC0 & FCC1)
1112 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1113                                     unsigned int fcc_offset)
1114 {
1115     gen_mov_reg_FCC0(dst, src, fcc_offset);
1116     tcg_gen_xori_tl(dst, dst, 0x1);
1117     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1118     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1119     tcg_gen_xori_tl(dst, dst, 0x1);
1120 }
1121
1122 // !3: !(FCC0 & FCC1)
1123 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1124                                     unsigned int fcc_offset)
1125 {
1126     gen_mov_reg_FCC0(dst, src, fcc_offset);
1127     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1128     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1129     tcg_gen_xori_tl(dst, dst, 0x1);
1130 }
1131
1132 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1133                                target_ulong pc2, TCGv r_cond)
1134 {
1135     int l1;
1136
1137     l1 = gen_new_label();
1138
1139     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1140
1141     gen_goto_tb(dc, 0, pc1, pc1 + 4);
1142
1143     gen_set_label(l1);
1144     gen_goto_tb(dc, 1, pc2, pc2 + 4);
1145 }
1146
1147 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1148                                 target_ulong pc2, TCGv r_cond)
1149 {
1150     int l1;
1151
1152     l1 = gen_new_label();
1153
1154     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1155
1156     gen_goto_tb(dc, 0, pc2, pc1);
1157
1158     gen_set_label(l1);
1159     gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1160 }
1161
1162 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1163                                       TCGv r_cond)
1164 {
1165     int l1, l2;
1166
1167     l1 = gen_new_label();
1168     l2 = gen_new_label();
1169
1170     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1171
1172     tcg_gen_movi_tl(cpu_npc, npc1);
1173     tcg_gen_br(l2);
1174
1175     gen_set_label(l1);
1176     tcg_gen_movi_tl(cpu_npc, npc2);
1177     gen_set_label(l2);
1178 }
1179
1180 /* call this function before using the condition register as it may
1181    have been set for a jump */
1182 static inline void flush_cond(DisasContext *dc, TCGv cond)
1183 {
1184     if (dc->npc == JUMP_PC) {
1185         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1186         dc->npc = DYNAMIC_PC;
1187     }
1188 }
1189
1190 static inline void save_npc(DisasContext *dc, TCGv cond)
1191 {
1192     if (dc->npc == JUMP_PC) {
1193         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1194         dc->npc = DYNAMIC_PC;
1195     } else if (dc->npc != DYNAMIC_PC) {
1196         tcg_gen_movi_tl(cpu_npc, dc->npc);
1197     }
1198 }
1199
1200 static inline void save_state(DisasContext *dc, TCGv cond)
1201 {
1202     tcg_gen_movi_tl(cpu_pc, dc->pc);
1203     save_npc(dc, cond);
1204 }
1205
1206 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1207 {
1208     if (dc->npc == JUMP_PC) {
1209         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1210         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1211         dc->pc = DYNAMIC_PC;
1212     } else if (dc->npc == DYNAMIC_PC) {
1213         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1214         dc->pc = DYNAMIC_PC;
1215     } else {
1216         dc->pc = dc->npc;
1217     }
1218 }
1219
1220 static inline void gen_op_next_insn(void)
1221 {
1222     tcg_gen_mov_tl(cpu_pc, cpu_npc);
1223     tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1224 }
1225
1226 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1227                             DisasContext *dc)
1228 {
1229     TCGv_i32 r_src;
1230
1231 #ifdef TARGET_SPARC64
1232     if (cc)
1233         r_src = cpu_xcc;
1234     else
1235         r_src = cpu_psr;
1236 #else
1237     r_src = cpu_psr;
1238 #endif
1239     switch (dc->cc_op) {
1240     case CC_OP_FLAGS:
1241         break;
1242     default:
1243         gen_helper_compute_psr();
1244         dc->cc_op = CC_OP_FLAGS;
1245         break;
1246     }
1247     switch (cond) {
1248     case 0x0:
1249         gen_op_eval_bn(r_dst);
1250         break;
1251     case 0x1:
1252         gen_op_eval_be(r_dst, r_src);
1253         break;
1254     case 0x2:
1255         gen_op_eval_ble(r_dst, r_src);
1256         break;
1257     case 0x3:
1258         gen_op_eval_bl(r_dst, r_src);
1259         break;
1260     case 0x4:
1261         gen_op_eval_bleu(r_dst, r_src);
1262         break;
1263     case 0x5:
1264         gen_op_eval_bcs(r_dst, r_src);
1265         break;
1266     case 0x6:
1267         gen_op_eval_bneg(r_dst, r_src);
1268         break;
1269     case 0x7:
1270         gen_op_eval_bvs(r_dst, r_src);
1271         break;
1272     case 0x8:
1273         gen_op_eval_ba(r_dst);
1274         break;
1275     case 0x9:
1276         gen_op_eval_bne(r_dst, r_src);
1277         break;
1278     case 0xa:
1279         gen_op_eval_bg(r_dst, r_src);
1280         break;
1281     case 0xb:
1282         gen_op_eval_bge(r_dst, r_src);
1283         break;
1284     case 0xc:
1285         gen_op_eval_bgu(r_dst, r_src);
1286         break;
1287     case 0xd:
1288         gen_op_eval_bcc(r_dst, r_src);
1289         break;
1290     case 0xe:
1291         gen_op_eval_bpos(r_dst, r_src);
1292         break;
1293     case 0xf:
1294         gen_op_eval_bvc(r_dst, r_src);
1295         break;
1296     }
1297 }
1298
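/*
 * Condition codes are evaluated lazily: cc_op records how the flags were
 * last produced (e.g. CC_OP_SUB or CC_OP_LOGIC set by gen_op_subi_cc
 * above), and gen_helper_compute_psr materialises them into cpu_psr only
 * when a conditional branch or other flag consumer actually needs them.
 */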
1299 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1300 {
1301     unsigned int offset;
1302
1303     switch (cc) {
1304     default:
1305     case 0x0:
1306         offset = 0;
1307         break;
1308     case 0x1:
1309         offset = 32 - 10;
1310         break;
1311     case 0x2:
1312         offset = 34 - 10;
1313         break;
1314     case 0x3:
1315         offset = 36 - 10;
1316         break;
1317     }
1318
1319     switch (cond) {
1320     case 0x0:
1321         gen_op_eval_bn(r_dst);
1322         break;
1323     case 0x1:
1324         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1325         break;
1326     case 0x2:
1327         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1328         break;
1329     case 0x3:
1330         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1331         break;
1332     case 0x4:
1333         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1334         break;
1335     case 0x5:
1336         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1337         break;
1338     case 0x6:
1339         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1340         break;
1341     case 0x7:
1342         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1343         break;
1344     case 0x8:
1345         gen_op_eval_ba(r_dst);
1346         break;
1347     case 0x9:
1348         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1349         break;
1350     case 0xa:
1351         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1352         break;
1353     case 0xb:
1354         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1355         break;
1356     case 0xc:
1357         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1358         break;
1359     case 0xd:
1360         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1361         break;
1362     case 0xe:
1363         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1364         break;
1365     case 0xf:
1366         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1367         break;
1368     }
1369 }
1370
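/*
 * The fcc_offset values above are relative to FCC0: assuming FSR_FCC0_SHIFT
 * is 10 (FCC0 at FSR bits 11:10), the offsets 32 - 10, 34 - 10 and 36 - 10
 * select FCC1, FCC2 and FCC3 at bits 33:32, 35:34 and 37:36 of the 64-bit
 * V9 FSR.
 */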
1371 #ifdef TARGET_SPARC64
1372 // Inverted logic
1373 static const int gen_tcg_cond_reg[8] = {
1374     -1,
1375     TCG_COND_NE,
1376     TCG_COND_GT,
1377     TCG_COND_GE,
1378     -1,
1379     TCG_COND_EQ,
1380     TCG_COND_LE,
1381     TCG_COND_LT,
1382 };
1383
1384 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1385 {
1386     int l1;
1387
1388     l1 = gen_new_label();
1389     tcg_gen_movi_tl(r_dst, 0);
1390     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1391     tcg_gen_movi_tl(r_dst, 1);
1392     gen_set_label(l1);
1393 }
1394 #endif
1395
1396 /* XXX: potentially incorrect if dynamic npc */
1397 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1398                       TCGv r_cond)
1399 {
1400     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1401     target_ulong target = dc->pc + offset;
1402
1403     if (cond == 0x0) {
1404         /* unconditional not taken */
1405         if (a) {
1406             dc->pc = dc->npc + 4;
1407             dc->npc = dc->pc + 4;
1408         } else {
1409             dc->pc = dc->npc;
1410             dc->npc = dc->pc + 4;
1411         }
1412     } else if (cond == 0x8) {
1413         /* unconditional taken */
1414         if (a) {
1415             dc->pc = target;
1416             dc->npc = dc->pc + 4;
1417         } else {
1418             dc->pc = dc->npc;
1419             dc->npc = target;
1420         }
1421     } else {
1422         flush_cond(dc, r_cond);
1423         gen_cond(r_cond, cc, cond, dc);
1424         if (a) {
1425             gen_branch_a(dc, target, dc->npc, r_cond);
1426             dc->is_br = 1;
1427         } else {
1428             dc->pc = dc->npc;
1429             dc->jump_pc[0] = target;
1430             dc->jump_pc[1] = dc->npc + 4;
1431             dc->npc = JUMP_PC;
1432         }
1433     }
1434 }
1435
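/*
 * The annul bit handling above follows the architecture: an annulled
 * conditional branch (a = 1) skips the delay slot only when the branch is
 * not taken, whereas "ba,a" always takes the branch without executing the
 * delay slot and "bn,a" skips both the delay slot and the branch.
 */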
1436 /* XXX: potentially incorrect if dynamic npc */
1437 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1438                       TCGv r_cond)
1439 {
1440     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1441     target_ulong target = dc->pc + offset;
1442
1443     if (cond == 0x0) {
1444         /* unconditional not taken */
1445         if (a) {
1446             dc->pc = dc->npc + 4;
1447             dc->npc = dc->pc + 4;
1448         } else {
1449             dc->pc = dc->npc;
1450             dc->npc = dc->pc + 4;
1451         }
1452     } else if (cond == 0x8) {
1453         /* unconditional taken */
1454         if (a) {
1455             dc->pc = target;
1456             dc->npc = dc->pc + 4;
1457         } else {
1458             dc->pc = dc->npc;
1459             dc->npc = target;
1460         }
1461     } else {
1462         flush_cond(dc, r_cond);
1463         gen_fcond(r_cond, cc, cond);
1464         if (a) {
1465             gen_branch_a(dc, target, dc->npc, r_cond);
1466             dc->is_br = 1;
1467         } else {
1468             dc->pc = dc->npc;
1469             dc->jump_pc[0] = target;
1470             dc->jump_pc[1] = dc->npc + 4;
1471             dc->npc = JUMP_PC;
1472         }
1473     }
1474 }
1475
1476 #ifdef TARGET_SPARC64
1477 /* XXX: potentially incorrect if dynamic npc */
1478 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1479                           TCGv r_cond, TCGv r_reg)
1480 {
1481     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1482     target_ulong target = dc->pc + offset;
1483
1484     flush_cond(dc, r_cond);
1485     gen_cond_reg(r_cond, cond, r_reg);
1486     if (a) {
1487         gen_branch_a(dc, target, dc->npc, r_cond);
1488         dc->is_br = 1;
1489     } else {
1490         dc->pc = dc->npc;
1491         dc->jump_pc[0] = target;
1492         dc->jump_pc[1] = dc->npc + 4;
1493         dc->npc = JUMP_PC;
1494     }
1495 }
1496
1497 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1498 {
1499     switch (fccno) {
1500     case 0:
1501         gen_helper_fcmps(r_rs1, r_rs2);
1502         break;
1503     case 1:
1504         gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1505         break;
1506     case 2:
1507         gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1508         break;
1509     case 3:
1510         gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1511         break;
1512     }
1513 }
1514
1515 static inline void gen_op_fcmpd(int fccno)
1516 {
1517     switch (fccno) {
1518     case 0:
1519         gen_helper_fcmpd();
1520         break;
1521     case 1:
1522         gen_helper_fcmpd_fcc1();
1523         break;
1524     case 2:
1525         gen_helper_fcmpd_fcc2();
1526         break;
1527     case 3:
1528         gen_helper_fcmpd_fcc3();
1529         break;
1530     }
1531 }
1532
1533 static inline void gen_op_fcmpq(int fccno)
1534 {
1535     switch (fccno) {
1536     case 0:
1537         gen_helper_fcmpq();
1538         break;
1539     case 1:
1540         gen_helper_fcmpq_fcc1();
1541         break;
1542     case 2:
1543         gen_helper_fcmpq_fcc2();
1544         break;
1545     case 3:
1546         gen_helper_fcmpq_fcc3();
1547         break;
1548     }
1549 }
1550
1551 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1552 {
1553     switch (fccno) {
1554     case 0:
1555         gen_helper_fcmpes(r_rs1, r_rs2);
1556         break;
1557     case 1:
1558         gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1559         break;
1560     case 2:
1561         gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1562         break;
1563     case 3:
1564         gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1565         break;
1566     }
1567 }
1568
1569 static inline void gen_op_fcmped(int fccno)
1570 {
1571     switch (fccno) {
1572     case 0:
1573         gen_helper_fcmped();
1574         break;
1575     case 1:
1576         gen_helper_fcmped_fcc1();
1577         break;
1578     case 2:
1579         gen_helper_fcmped_fcc2();
1580         break;
1581     case 3:
1582         gen_helper_fcmped_fcc3();
1583         break;
1584     }
1585 }
1586
1587 static inline void gen_op_fcmpeq(int fccno)
1588 {
1589     switch (fccno) {
1590     case 0:
1591         gen_helper_fcmpeq();
1592         break;
1593     case 1:
1594         gen_helper_fcmpeq_fcc1();
1595         break;
1596     case 2:
1597         gen_helper_fcmpeq_fcc2();
1598         break;
1599     case 3:
1600         gen_helper_fcmpeq_fcc3();
1601         break;
1602     }
1603 }
1604
1605 #else
1606
1607 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1608 {
1609     gen_helper_fcmps(r_rs1, r_rs2);
1610 }
1611
1612 static inline void gen_op_fcmpd(int fccno)
1613 {
1614     gen_helper_fcmpd();
1615 }
1616
1617 static inline void gen_op_fcmpq(int fccno)
1618 {
1619     gen_helper_fcmpq();
1620 }
1621
1622 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1623 {
1624     gen_helper_fcmpes(r_rs1, r_rs2);
1625 }
1626
1627 static inline void gen_op_fcmped(int fccno)
1628 {
1629     gen_helper_fcmped();
1630 }
1631
1632 static inline void gen_op_fcmpeq(int fccno)
1633 {
1634     gen_helper_fcmpeq();
1635 }
1636 #endif
1637
1638 static inline void gen_op_fpexception_im(int fsr_flags)
1639 {
1640     TCGv_i32 r_const;
1641
1642     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1643     tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1644     r_const = tcg_const_i32(TT_FP_EXCP);
1645     gen_helper_raise_exception(r_const);
1646     tcg_temp_free_i32(r_const);
1647 }
1648
1649 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1650 {
1651 #if !defined(CONFIG_USER_ONLY)
1652     if (!dc->fpu_enabled) {
1653         TCGv_i32 r_const;
1654
1655         save_state(dc, r_cond);
1656         r_const = tcg_const_i32(TT_NFPU_INSN);
1657         gen_helper_raise_exception(r_const);
1658         tcg_temp_free_i32(r_const);
1659         dc->is_br = 1;
1660         return 1;
1661     }
1662 #endif
1663     return 0;
1664 }
1665
1666 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1667 {
1668     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1669 }
1670
1671 static inline void gen_clear_float_exceptions(void)
1672 {
1673     gen_helper_clear_float_exceptions();
1674 }
1675
1676 /* asi moves */
1677 #ifdef TARGET_SPARC64
1678 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1679 {
1680     int asi;
1681     TCGv_i32 r_asi;
1682
1683     if (IS_IMM) {
1684         r_asi = tcg_temp_new_i32();
1685         tcg_gen_mov_i32(r_asi, cpu_asi);
1686     } else {
1687         asi = GET_FIELD(insn, 19, 26);
1688         r_asi = tcg_const_i32(asi);
1689     }
1690     return r_asi;
1691 }
1692
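/*
 * V9 alternate-space accesses take their ASI either from the %asi register
 * (immediate form, i bit set) or from the 8-bit asi field of the
 * instruction, bits 12..5, which GET_FIELD(insn, 19, 26) extracts.
 */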
1693 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1694                               int sign)
1695 {
1696     TCGv_i32 r_asi, r_size, r_sign;
1697
1698     r_asi = gen_get_asi(insn, addr);
1699     r_size = tcg_const_i32(size);
1700     r_sign = tcg_const_i32(sign);
1701     gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1702     tcg_temp_free_i32(r_sign);
1703     tcg_temp_free_i32(r_size);
1704     tcg_temp_free_i32(r_asi);
1705 }
1706
1707 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1708 {
1709     TCGv_i32 r_asi, r_size;
1710
1711     r_asi = gen_get_asi(insn, addr);
1712     r_size = tcg_const_i32(size);
1713     gen_helper_st_asi(addr, src, r_asi, r_size);
1714     tcg_temp_free_i32(r_size);
1715     tcg_temp_free_i32(r_asi);
1716 }
1717
1718 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1719 {
1720     TCGv_i32 r_asi, r_size, r_rd;
1721
1722     r_asi = gen_get_asi(insn, addr);
1723     r_size = tcg_const_i32(size);
1724     r_rd = tcg_const_i32(rd);
1725     gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1726     tcg_temp_free_i32(r_rd);
1727     tcg_temp_free_i32(r_size);
1728     tcg_temp_free_i32(r_asi);
1729 }
1730
1731 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1732 {
1733     TCGv_i32 r_asi, r_size, r_rd;
1734
1735     r_asi = gen_get_asi(insn, addr);
1736     r_size = tcg_const_i32(size);
1737     r_rd = tcg_const_i32(rd);
1738     gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1739     tcg_temp_free_i32(r_rd);
1740     tcg_temp_free_i32(r_size);
1741     tcg_temp_free_i32(r_asi);
1742 }
1743
1744 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1745 {
1746     TCGv_i32 r_asi, r_size, r_sign;
1747
1748     r_asi = gen_get_asi(insn, addr);
1749     r_size = tcg_const_i32(4);
1750     r_sign = tcg_const_i32(0);
1751     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1752     tcg_temp_free_i32(r_sign);
1753     gen_helper_st_asi(addr, dst, r_asi, r_size);
1754     tcg_temp_free_i32(r_size);
1755     tcg_temp_free_i32(r_asi);
1756     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1757 }
1758
1759 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1760 {
1761     TCGv_i32 r_asi, r_rd;
1762
1763     r_asi = gen_get_asi(insn, addr);
1764     r_rd = tcg_const_i32(rd);
1765     gen_helper_ldda_asi(addr, r_asi, r_rd);
1766     tcg_temp_free_i32(r_rd);
1767     tcg_temp_free_i32(r_asi);
1768 }
1769
1770 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1771 {
1772     TCGv_i32 r_asi, r_size;
1773
1774     gen_movl_reg_TN(rd + 1, cpu_tmp0);
1775     tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1776     r_asi = gen_get_asi(insn, addr);
1777     r_size = tcg_const_i32(8);
1778     gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1779     tcg_temp_free_i32(r_size);
1780     tcg_temp_free_i32(r_asi);
1781 }
1782
1783 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1784                                int rd)
1785 {
1786     TCGv r_val1;
1787     TCGv_i32 r_asi;
1788
1789     r_val1 = tcg_temp_new();
1790     gen_movl_reg_TN(rd, r_val1);
1791     r_asi = gen_get_asi(insn, addr);
1792     gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1793     tcg_temp_free_i32(r_asi);
1794     tcg_temp_free(r_val1);
1795 }
1796
1797 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1798                                 int rd)
1799 {
1800     TCGv_i32 r_asi;
1801
1802     gen_movl_reg_TN(rd, cpu_tmp64);
1803     r_asi = gen_get_asi(insn, addr);
1804     gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1805     tcg_temp_free_i32(r_asi);
1806 }
1807
1808 #elif !defined(CONFIG_USER_ONLY)
1809
1810 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1811                               int sign)
1812 {
1813     TCGv_i32 r_asi, r_size, r_sign;
1814
1815     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1816     r_size = tcg_const_i32(size);
1817     r_sign = tcg_const_i32(sign);
1818     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1819     tcg_temp_free(r_sign);
1820     tcg_temp_free(r_size);
1821     tcg_temp_free(r_asi);
1822     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1823 }
1824
1825 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1826 {
1827     TCGv_i32 r_asi, r_size;
1828
1829     tcg_gen_extu_tl_i64(cpu_tmp64, src);
1830     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1831     r_size = tcg_const_i32(size);
1832     gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1833     tcg_temp_free(r_size);
1834     tcg_temp_free(r_asi);
1835 }
1836
1837 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1838 {
1839     TCGv_i32 r_asi, r_size, r_sign;
1840     TCGv_i64 r_val;
1841
1842     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1843     r_size = tcg_const_i32(4);
1844     r_sign = tcg_const_i32(0);
1845     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1846     tcg_temp_free(r_sign);
1847     r_val = tcg_temp_new_i64();
1848     tcg_gen_extu_tl_i64(r_val, dst);
1849     gen_helper_st_asi(addr, r_val, r_asi, r_size);
1850     tcg_temp_free_i64(r_val);
1851     tcg_temp_free(r_size);
1852     tcg_temp_free(r_asi);
1853     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1854 }
1855
1856 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1857 {
1858     TCGv_i32 r_asi, r_size, r_sign;
1859
1860     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1861     r_size = tcg_const_i32(8);
1862     r_sign = tcg_const_i32(0);
1863     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1864     tcg_temp_free(r_sign);
1865     tcg_temp_free(r_size);
1866     tcg_temp_free(r_asi);
1867     tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1868     gen_movl_TN_reg(rd + 1, cpu_tmp0);
1869     tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1870     tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1871     gen_movl_TN_reg(rd, hi);
1872 }
1873
1874 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1875 {
1876     TCGv_i32 r_asi, r_size;
1877
1878     gen_movl_reg_TN(rd + 1, cpu_tmp0);
1879     tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1880     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1881     r_size = tcg_const_i32(8);
1882     gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1883     tcg_temp_free(r_size);
1884     tcg_temp_free(r_asi);
1885 }
1886 #endif
1887
1888 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
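     /* LDSTUB(A): load one byte through the ASI, then store 0xff back to
        the same location, matching ldstub's set-to-ones semantics. */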
1889 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1890 {
1891     TCGv_i64 r_val;
1892     TCGv_i32 r_asi, r_size;
1893
1894     gen_ld_asi(dst, addr, insn, 1, 0);
1895
1896     r_val = tcg_const_i64(0xffULL);
1897     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1898     r_size = tcg_const_i32(1);
1899     gen_helper_st_asi(addr, r_val, r_asi, r_size);
1900     tcg_temp_free_i32(r_size);
1901     tcg_temp_free_i32(r_asi);
1902     tcg_temp_free_i64(r_val);
1903 }
1904 #endif
1905
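     /* Operand fetch for rs1/rs2: %g0 reads as a fresh constant-zero temp,
        %g1-%g7 come from the TCG globals, and windowed registers (>= 8)
        are loaded from the register-window pointer into 'def'. */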
1906 static inline TCGv get_src1(unsigned int insn, TCGv def)
1907 {
1908     TCGv r_rs1 = def;
1909     unsigned int rs1;
1910
1911     rs1 = GET_FIELD(insn, 13, 17);
1912     if (rs1 == 0)
1913         r_rs1 = tcg_const_tl(0); // XXX how to free?
1914     else if (rs1 < 8)
1915         r_rs1 = cpu_gregs[rs1];
1916     else
1917         tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1918     return r_rs1;
1919 }
1920
1921 static inline TCGv get_src2(unsigned int insn, TCGv def)
1922 {
1923     TCGv r_rs2 = def;
1924
1925     if (IS_IMM) { /* immediate */
1926         target_long simm;
1927
1928         simm = GET_FIELDs(insn, 19, 31);
1929         r_rs2 = tcg_const_tl(simm); // XXX how to free?
1930     } else { /* register */
1931         unsigned int rs2;
1932
1933         rs2 = GET_FIELD(insn, 27, 31);
1934         if (rs2 == 0)
1935             r_rs2 = tcg_const_tl(0); // XXX how to free?
1936         else if (rs2 < 8)
1937             r_rs2 = cpu_gregs[rs2];
1938         else
1939             tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1940     }
1941     return r_rs2;
1942 }
1943
1944 #define CHECK_IU_FEATURE(dc, FEATURE)                      \
1945     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
1946         goto illegal_insn;
1947 #define CHECK_FPU_FEATURE(dc, FEATURE)                     \
1948     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
1949         goto nfpu_insn;
1950
1951 /* before an instruction, dc->pc must be static */
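     /* Instruction word layout (GET_FIELD counts bits MSB-first):
        op = insn<31:30> (GET_FIELD(insn, 0, 1)), rd = insn<29:25>
        (GET_FIELD(insn, 2, 6)); the remaining fields are decoded per
        format below. */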
1952 static void disas_sparc_insn(DisasContext * dc)
1953 {
1954     unsigned int insn, opc, rs1, rs2, rd;
1955     target_long simm;
1956
1957     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1958         tcg_gen_debug_insn_start(dc->pc);
1959     insn = ldl_code(dc->pc);
1960     opc = GET_FIELD(insn, 0, 1);
1961
1962     rd = GET_FIELD(insn, 2, 6);
1963
1964     cpu_src1 = tcg_temp_new(); // const
1965     cpu_src2 = tcg_temp_new(); // const
1966
1967     switch (opc) {
1968     case 0:                     /* branches/sethi */
1969         {
1970             unsigned int xop = GET_FIELD(insn, 7, 9);
1971             int32_t target;
1972             switch (xop) {
1973 #ifdef TARGET_SPARC64
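                 /* BPcc: 19-bit displacement in insn<18:0>, scaled by 4;
                    insn<21:20> selects the condition codes
                    (0 = %icc, 2 = %xcc). */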
1974             case 0x1:           /* V9 BPcc */
1975                 {
1976                     int cc;
1977
1978                     target = GET_FIELD_SP(insn, 0, 18);
1979                     target = sign_extend(target, 19);
1980                     target <<= 2;
1981                     cc = GET_FIELD_SP(insn, 20, 21);
1982                     if (cc == 0)
1983                         do_branch(dc, target, insn, 0, cpu_cond);
1984                     else if (cc == 2)
1985                         do_branch(dc, target, insn, 1, cpu_cond);
1986                     else
1987                         goto illegal_insn;
1988                     goto jmp_insn;
1989                 }
1990             case 0x3:           /* V9 BPr */
1991                 {
1992                     target = GET_FIELD_SP(insn, 0, 13) |
1993                         (GET_FIELD_SP(insn, 20, 21) << 14);
1994                     target = sign_extend(target, 16);
1995                     target <<= 2;
1996                     cpu_src1 = get_src1(insn, cpu_src1);
1997                     do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1998                     goto jmp_insn;
1999                 }
2000             case 0x5:           /* V9 FBPcc */
2001                 {
2002                     int cc = GET_FIELD_SP(insn, 20, 21);
2003                     if (gen_trap_ifnofpu(dc, cpu_cond))
2004                         goto jmp_insn;
2005                     target = GET_FIELD_SP(insn, 0, 18);
2006                     target = sign_extend(target, 19);
2007                     target <<= 2;
2008                     do_fbranch(dc, target, insn, cc, cpu_cond);
2009                     goto jmp_insn;
2010                 }
2011 #else
2012             case 0x7:           /* CBN+x */
2013                 {
2014                     goto ncp_insn;
2015                 }
2016 #endif
2017             case 0x2:           /* BN+x */
2018                 {
2019                     target = GET_FIELD(insn, 10, 31);
2020                     target = sign_extend(target, 22);
2021                     target <<= 2;
2022                     do_branch(dc, target, insn, 0, cpu_cond);
2023                     goto jmp_insn;
2024                 }
2025             case 0x6:           /* FBN+x */
2026                 {
2027                     if (gen_trap_ifnofpu(dc, cpu_cond))
2028                         goto jmp_insn;
2029                     target = GET_FIELD(insn, 10, 31);
2030                     target = sign_extend(target, 22);
2031                     target <<= 2;
2032                     do_fbranch(dc, target, insn, 0, cpu_cond);
2033                     goto jmp_insn;
2034                 }
2035             case 0x4:           /* SETHI */
2036                 if (rd) { // nop
2037                     uint32_t value = GET_FIELD(insn, 10, 31);
2038                     TCGv r_const;
2039
2040                     r_const = tcg_const_tl(value << 10);
2041                     gen_movl_TN_reg(rd, r_const);
2042                     tcg_temp_free(r_const);
2043                 }
2044                 break;
2045             case 0x0:           /* UNIMPL */
2046             default:
2047                 goto illegal_insn;
2048             }
2049             break;
2050         }
2051         break;
2052     case 1:                     /*CALL*/
2053         {
2054             target_long target = GET_FIELDs(insn, 2, 31) << 2;
2055             TCGv r_const;
2056
2057             r_const = tcg_const_tl(dc->pc);
2058             gen_movl_TN_reg(15, r_const);
2059             tcg_temp_free(r_const);
2060             target += dc->pc;
2061             gen_mov_pc_npc(dc, cpu_cond);
2062             dc->npc = target;
2063         }
2064         goto jmp_insn;
2065     case 2:                     /* FPU & Logical Operations */
2066         {
2067             unsigned int xop = GET_FIELD(insn, 7, 12);
2068             if (xop == 0x3a) {  /* generate trap */
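                     /* Ticc: the trap number is (rs1 + rs2-or-imm), masked to
                        the trap range and offset by TT_TRAP; conditional forms
                        branch over the raise when the condition is false. */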
2069                 int cond;
2070
2071                 cpu_src1 = get_src1(insn, cpu_src1);
2072                 if (IS_IMM) {
2073                     rs2 = GET_FIELD(insn, 25, 31);
2074                     tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2075                 } else {
2076                     rs2 = GET_FIELD(insn, 27, 31);
2077                     if (rs2 != 0) {
2078                         gen_movl_reg_TN(rs2, cpu_src2);
2079                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2080                     } else
2081                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
2082                 }
2083                 cond = GET_FIELD(insn, 3, 6);
2084                 if (cond == 0x8) {
2085                     save_state(dc, cpu_cond);
2086                     if ((dc->def->features & CPU_FEATURE_HYPV) &&
2087                         supervisor(dc))
2088                         tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2089                     else
2090                         tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2091                     tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2092                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2093                     gen_helper_raise_exception(cpu_tmp32);
2094                 } else if (cond != 0) {
2095                     TCGv r_cond = tcg_temp_new();
2096                     int l1;
2097 #ifdef TARGET_SPARC64
2098                     /* V9 icc/xcc */
2099                     int cc = GET_FIELD_SP(insn, 11, 12);
2100
2101                     save_state(dc, cpu_cond);
2102                     if (cc == 0)
2103                         gen_cond(r_cond, 0, cond, dc);
2104                     else if (cc == 2)
2105                         gen_cond(r_cond, 1, cond, dc);
2106                     else
2107                         goto illegal_insn;
2108 #else
2109                     save_state(dc, cpu_cond);
2110                     gen_cond(r_cond, 0, cond, dc);
2111 #endif
2112                     l1 = gen_new_label();
2113                     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2114
2115                     if ((dc->def->features & CPU_FEATURE_HYPV) &&
2116                         supervisor(dc))
2117                         tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2118                     else
2119                         tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2120                     tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2121                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2122                     gen_helper_raise_exception(cpu_tmp32);
2123
2124                     gen_set_label(l1);
2125                     tcg_temp_free(r_cond);
2126                 }
2127                 gen_op_next_insn();
2128                 tcg_gen_exit_tb(0);
2129                 dc->is_br = 1;
2130                 goto jmp_insn;
2131             } else if (xop == 0x28) {
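                     /* RDY / V9 RDasr: rs1 selects which ancillary state
                        register is read. */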
2132                 rs1 = GET_FIELD(insn, 13, 17);
2133                 switch(rs1) {
2134                 case 0: /* rdy */
2135 #ifndef TARGET_SPARC64
2136                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2137                                        manual, rdy on the microSPARC
2138                                        II */
2139                 case 0x0f:          /* stbar in the SPARCv8 manual,
2140                                        rdy on the microSPARC II */
2141                 case 0x10 ... 0x1f: /* implementation-dependent in the
2142                                        SPARCv8 manual, rdy on the
2143                                        microSPARC II */
2144 #endif
2145                     gen_movl_TN_reg(rd, cpu_y);
2146                     break;
2147 #ifdef TARGET_SPARC64
2148                 case 0x2: /* V9 rdccr */
2149                     gen_helper_compute_psr();
2150                     gen_helper_rdccr(cpu_dst);
2151                     gen_movl_TN_reg(rd, cpu_dst);
2152                     break;
2153                 case 0x3: /* V9 rdasi */
2154                     tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2155                     gen_movl_TN_reg(rd, cpu_dst);
2156                     break;
2157                 case 0x4: /* V9 rdtick */
2158                     {
2159                         TCGv_ptr r_tickptr;
2160
2161                         r_tickptr = tcg_temp_new_ptr();
2162                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2163                                        offsetof(CPUState, tick));
2164                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2165                         tcg_temp_free_ptr(r_tickptr);
2166                         gen_movl_TN_reg(rd, cpu_dst);
2167                     }
2168                     break;
2169                 case 0x5: /* V9 rdpc */
2170                     {
2171                         TCGv r_const;
2172
2173                         r_const = tcg_const_tl(dc->pc);
2174                         gen_movl_TN_reg(rd, r_const);
2175                         tcg_temp_free(r_const);
2176                     }
2177                     break;
2178                 case 0x6: /* V9 rdfprs */
2179                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2180                     gen_movl_TN_reg(rd, cpu_dst);
2181                     break;
2182                 case 0xf: /* V9 membar */
2183                     break; /* no effect */
2184                 case 0x13: /* Graphics Status */
2185                     if (gen_trap_ifnofpu(dc, cpu_cond))
2186                         goto jmp_insn;
2187                     gen_movl_TN_reg(rd, cpu_gsr);
2188                     break;
2189                 case 0x16: /* Softint */
2190                     tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2191                     gen_movl_TN_reg(rd, cpu_dst);
2192                     break;
2193                 case 0x17: /* Tick compare */
2194                     gen_movl_TN_reg(rd, cpu_tick_cmpr);
2195                     break;
2196                 case 0x18: /* System tick */
2197                     {
2198                         TCGv_ptr r_tickptr;
2199
2200                         r_tickptr = tcg_temp_new_ptr();
2201                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2202                                        offsetof(CPUState, stick));
2203                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2204                         tcg_temp_free_ptr(r_tickptr);
2205                         gen_movl_TN_reg(rd, cpu_dst);
2206                     }
2207                     break;
2208                 case 0x19: /* System tick compare */
2209                     gen_movl_TN_reg(rd, cpu_stick_cmpr);
2210                     break;
2211                 case 0x10: /* Performance Control */
2212                 case 0x11: /* Performance Instrumentation Counter */
2213                 case 0x12: /* Dispatch Control */
2214                 case 0x14: /* Softint set, WO */
2215                 case 0x15: /* Softint clear, WO */
2216 #endif
2217                 default:
2218                     goto illegal_insn;
2219                 }
2220 #if !defined(CONFIG_USER_ONLY)
2221             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2222 #ifndef TARGET_SPARC64
2223                 if (!supervisor(dc))
2224                     goto priv_insn;
2225                 gen_helper_compute_psr();
2226                 dc->cc_op = CC_OP_FLAGS;
2227                 gen_helper_rdpsr(cpu_dst);
2228 #else
2229                 CHECK_IU_FEATURE(dc, HYPV);
2230                 if (!hypervisor(dc))
2231                     goto priv_insn;
2232                 rs1 = GET_FIELD(insn, 13, 17);
2233                 switch (rs1) {
2234                 case 0: // hpstate
2235                     // gen_op_rdhpstate();
2236                     break;
2237                 case 1: // htstate
2238                     // gen_op_rdhtstate();
2239                     break;
2240                 case 3: // hintp
2241                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2242                     break;
2243                 case 5: // htba
2244                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
2245                     break;
2246                 case 6: // hver
2247                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
2248                     break;
2249                 case 31: // hstick_cmpr
2250                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2251                     break;
2252                 default:
2253                     goto illegal_insn;
2254                 }
2255 #endif
2256                 gen_movl_TN_reg(rd, cpu_dst);
2257                 break;
2258             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2259                 if (!supervisor(dc))
2260                     goto priv_insn;
2261 #ifdef TARGET_SPARC64
2262                 rs1 = GET_FIELD(insn, 13, 17);
2263                 switch (rs1) {
2264                 case 0: // tpc
2265                     {
2266                         TCGv_ptr r_tsptr;
2267
2268                         r_tsptr = tcg_temp_new_ptr();
2269                         tcg_gen_ld_ptr(r_tsptr, cpu_env,
2270                                        offsetof(CPUState, tsptr));
2271                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2272                                       offsetof(trap_state, tpc));
2273                         tcg_temp_free_ptr(r_tsptr);
2274                     }
2275                     break;
2276                 case 1: // tnpc
2277                     {
2278                         TCGv_ptr r_tsptr;
2279
2280                         r_tsptr = tcg_temp_new_ptr();
2281                         tcg_gen_ld_ptr(r_tsptr, cpu_env,
2282                                        offsetof(CPUState, tsptr));
2283                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2284                                       offsetof(trap_state, tnpc));
2285                         tcg_temp_free_ptr(r_tsptr);
2286                     }
2287                     break;
2288                 case 2: // tstate
2289                     {
2290                         TCGv_ptr r_tsptr;
2291
2292                         r_tsptr = tcg_temp_new_ptr();
2293                         tcg_gen_ld_ptr(r_tsptr, cpu_env,
2294                                        offsetof(CPUState, tsptr));
2295                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2296                                       offsetof(trap_state, tstate));
2297                         tcg_temp_free_ptr(r_tsptr);
2298                     }
2299                     break;
2300                 case 3: // tt
2301                     {
2302                         TCGv_ptr r_tsptr;
2303
2304                         r_tsptr = tcg_temp_new_ptr();
2305                         tcg_gen_ld_ptr(r_tsptr, cpu_env,
2306                                        offsetof(CPUState, tsptr));
2307                         tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2308                                        offsetof(trap_state, tt));
2309                         tcg_temp_free_ptr(r_tsptr);
2310                         tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2311                     }
2312                     break;
2313                 case 4: // tick
2314                     {
2315                         TCGv_ptr r_tickptr;
2316
2317                         r_tickptr = tcg_temp_new_ptr();
2318                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2319                                        offsetof(CPUState, tick));
2320                         gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2321                         gen_movl_TN_reg(rd, cpu_tmp0);
2322                         tcg_temp_free_ptr(r_tickptr);
2323                     }
2324                     break;
2325                 case 5: // tba
2326                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2327                     break;
2328                 case 6: // pstate
2329                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2330                                    offsetof(CPUSPARCState, pstate));
2331                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2332                     break;
2333                 case 7: // tl
2334                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2335                                    offsetof(CPUSPARCState, tl));
2336                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2337                     break;
2338                 case 8: // pil
2339                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2340                                    offsetof(CPUSPARCState, psrpil));
2341                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2342                     break;
2343                 case 9: // cwp
2344                     gen_helper_rdcwp(cpu_tmp0);
2345                     break;
2346                 case 10: // cansave
2347                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2348                                    offsetof(CPUSPARCState, cansave));
2349                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2350                     break;
2351                 case 11: // canrestore
2352                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2353                                    offsetof(CPUSPARCState, canrestore));
2354                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2355                     break;
2356                 case 12: // cleanwin
2357                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2358                                    offsetof(CPUSPARCState, cleanwin));
2359                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2360                     break;
2361                 case 13: // otherwin
2362                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2363                                    offsetof(CPUSPARCState, otherwin));
2364                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2365                     break;
2366                 case 14: // wstate
2367                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2368                                    offsetof(CPUSPARCState, wstate));
2369                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2370                     break;
2371                 case 16: // UA2005 gl
2372                     CHECK_IU_FEATURE(dc, GL);
2373                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2374                                    offsetof(CPUSPARCState, gl));
2375                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2376                     break;
2377                 case 26: // UA2005 strand status
2378                     CHECK_IU_FEATURE(dc, HYPV);
2379                     if (!hypervisor(dc))
2380                         goto priv_insn;
2381                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2382                     break;
2383                 case 31: // ver
2384                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2385                     break;
2386                 case 15: // fq
2387                 default:
2388                     goto illegal_insn;
2389                 }
2390 #else
2391                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2392 #endif
2393                 gen_movl_TN_reg(rd, cpu_tmp0);
2394                 break;
2395             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2396 #ifdef TARGET_SPARC64
2397                 save_state(dc, cpu_cond);
2398                 gen_helper_flushw();
2399 #else
2400                 if (!supervisor(dc))
2401                     goto priv_insn;
2402                 gen_movl_TN_reg(rd, cpu_tbr);
2403 #endif
2404                 break;
2405 #endif
2406             } else if (xop == 0x34) {   /* FPU Operations */
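                     /* FPop1: single-precision ops use cpu_fpr[] directly;
                        double/quad operands are staged through the DT0/DT1 and
                        QT0/QT1 helper registers, with IEEE exception checks
                        after each operation. */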
2407                 if (gen_trap_ifnofpu(dc, cpu_cond))
2408                     goto jmp_insn;
2409                 gen_op_clear_ieee_excp_and_FTT();
2410                 rs1 = GET_FIELD(insn, 13, 17);
2411                 rs2 = GET_FIELD(insn, 27, 31);
2412                 xop = GET_FIELD(insn, 18, 26);
2413                 switch (xop) {
2414                 case 0x1: /* fmovs */
2415                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2416                     break;
2417                 case 0x5: /* fnegs */
2418                     gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2419                     break;
2420                 case 0x9: /* fabss */
2421                     gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2422                     break;
2423                 case 0x29: /* fsqrts */
2424                     CHECK_FPU_FEATURE(dc, FSQRT);
2425                     gen_clear_float_exceptions();
2426                     gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2427                     gen_helper_check_ieee_exceptions();
2428                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2429                     break;
2430                 case 0x2a: /* fsqrtd */
2431                     CHECK_FPU_FEATURE(dc, FSQRT);
2432                     gen_op_load_fpr_DT1(DFPREG(rs2));
2433                     gen_clear_float_exceptions();
2434                     gen_helper_fsqrtd();
2435                     gen_helper_check_ieee_exceptions();
2436                     gen_op_store_DT0_fpr(DFPREG(rd));
2437                     break;
2438                 case 0x2b: /* fsqrtq */
2439                     CHECK_FPU_FEATURE(dc, FLOAT128);
2440                     gen_op_load_fpr_QT1(QFPREG(rs2));
2441                     gen_clear_float_exceptions();
2442                     gen_helper_fsqrtq();
2443                     gen_helper_check_ieee_exceptions();
2444                     gen_op_store_QT0_fpr(QFPREG(rd));
2445                     break;
2446                 case 0x41: /* fadds */
2447                     gen_clear_float_exceptions();
2448                     gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2449                     gen_helper_check_ieee_exceptions();
2450                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2451                     break;
2452                 case 0x42: /* faddd */
2453                     gen_op_load_fpr_DT0(DFPREG(rs1));
2454                     gen_op_load_fpr_DT1(DFPREG(rs2));
2455                     gen_clear_float_exceptions();
2456                     gen_helper_faddd();
2457                     gen_helper_check_ieee_exceptions();
2458                     gen_op_store_DT0_fpr(DFPREG(rd));
2459                     break;
2460                 case 0x43: /* faddq */
2461                     CHECK_FPU_FEATURE(dc, FLOAT128);
2462                     gen_op_load_fpr_QT0(QFPREG(rs1));
2463                     gen_op_load_fpr_QT1(QFPREG(rs2));
2464                     gen_clear_float_exceptions();
2465                     gen_helper_faddq();
2466                     gen_helper_check_ieee_exceptions();
2467                     gen_op_store_QT0_fpr(QFPREG(rd));
2468                     break;
2469                 case 0x45: /* fsubs */
2470                     gen_clear_float_exceptions();
2471                     gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2472                     gen_helper_check_ieee_exceptions();
2473                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2474                     break;
2475                 case 0x46: /* fsubd */
2476                     gen_op_load_fpr_DT0(DFPREG(rs1));
2477                     gen_op_load_fpr_DT1(DFPREG(rs2));
2478                     gen_clear_float_exceptions();
2479                     gen_helper_fsubd();
2480                     gen_helper_check_ieee_exceptions();
2481                     gen_op_store_DT0_fpr(DFPREG(rd));
2482                     break;
2483                 case 0x47: /* fsubq */
2484                     CHECK_FPU_FEATURE(dc, FLOAT128);
2485                     gen_op_load_fpr_QT0(QFPREG(rs1));
2486                     gen_op_load_fpr_QT1(QFPREG(rs2));
2487                     gen_clear_float_exceptions();
2488                     gen_helper_fsubq();
2489                     gen_helper_check_ieee_exceptions();
2490                     gen_op_store_QT0_fpr(QFPREG(rd));
2491                     break;
2492                 case 0x49: /* fmuls */
2493                     CHECK_FPU_FEATURE(dc, FMUL);
2494                     gen_clear_float_exceptions();
2495                     gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2496                     gen_helper_check_ieee_exceptions();
2497                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2498                     break;
2499                 case 0x4a: /* fmuld */
2500                     CHECK_FPU_FEATURE(dc, FMUL);
2501                     gen_op_load_fpr_DT0(DFPREG(rs1));
2502                     gen_op_load_fpr_DT1(DFPREG(rs2));
2503                     gen_clear_float_exceptions();
2504                     gen_helper_fmuld();
2505                     gen_helper_check_ieee_exceptions();
2506                     gen_op_store_DT0_fpr(DFPREG(rd));
2507                     break;
2508                 case 0x4b: /* fmulq */
2509                     CHECK_FPU_FEATURE(dc, FLOAT128);
2510                     CHECK_FPU_FEATURE(dc, FMUL);
2511                     gen_op_load_fpr_QT0(QFPREG(rs1));
2512                     gen_op_load_fpr_QT1(QFPREG(rs2));
2513                     gen_clear_float_exceptions();
2514                     gen_helper_fmulq();
2515                     gen_helper_check_ieee_exceptions();
2516                     gen_op_store_QT0_fpr(QFPREG(rd));
2517                     break;
2518                 case 0x4d: /* fdivs */
2519                     gen_clear_float_exceptions();
2520                     gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2521                     gen_helper_check_ieee_exceptions();
2522                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2523                     break;
2524                 case 0x4e: /* fdivd */
2525                     gen_op_load_fpr_DT0(DFPREG(rs1));
2526                     gen_op_load_fpr_DT1(DFPREG(rs2));
2527                     gen_clear_float_exceptions();
2528                     gen_helper_fdivd();
2529                     gen_helper_check_ieee_exceptions();
2530                     gen_op_store_DT0_fpr(DFPREG(rd));
2531                     break;
2532                 case 0x4f: /* fdivq */
2533                     CHECK_FPU_FEATURE(dc, FLOAT128);
2534                     gen_op_load_fpr_QT0(QFPREG(rs1));
2535                     gen_op_load_fpr_QT1(QFPREG(rs2));
2536                     gen_clear_float_exceptions();
2537                     gen_helper_fdivq();
2538                     gen_helper_check_ieee_exceptions();
2539                     gen_op_store_QT0_fpr(QFPREG(rd));
2540                     break;
2541                 case 0x69: /* fsmuld */
2542                     CHECK_FPU_FEATURE(dc, FSMULD);
2543                     gen_clear_float_exceptions();
2544                     gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2545                     gen_helper_check_ieee_exceptions();
2546                     gen_op_store_DT0_fpr(DFPREG(rd));
2547                     break;
2548                 case 0x6e: /* fdmulq */
2549                     CHECK_FPU_FEATURE(dc, FLOAT128);
2550                     gen_op_load_fpr_DT0(DFPREG(rs1));
2551                     gen_op_load_fpr_DT1(DFPREG(rs2));
2552                     gen_clear_float_exceptions();
2553                     gen_helper_fdmulq();
2554                     gen_helper_check_ieee_exceptions();
2555                     gen_op_store_QT0_fpr(QFPREG(rd));
2556                     break;
2557                 case 0xc4: /* fitos */
2558                     gen_clear_float_exceptions();
2559                     gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2560                     gen_helper_check_ieee_exceptions();
2561                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2562                     break;
2563                 case 0xc6: /* fdtos */
2564                     gen_op_load_fpr_DT1(DFPREG(rs2));
2565                     gen_clear_float_exceptions();
2566                     gen_helper_fdtos(cpu_tmp32);
2567                     gen_helper_check_ieee_exceptions();
2568                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2569                     break;
2570                 case 0xc7: /* fqtos */
2571                     CHECK_FPU_FEATURE(dc, FLOAT128);
2572                     gen_op_load_fpr_QT1(QFPREG(rs2));
2573                     gen_clear_float_exceptions();
2574                     gen_helper_fqtos(cpu_tmp32);
2575                     gen_helper_check_ieee_exceptions();
2576                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2577                     break;
2578                 case 0xc8: /* fitod */
2579                     gen_helper_fitod(cpu_fpr[rs2]);
2580                     gen_op_store_DT0_fpr(DFPREG(rd));
2581                     break;
2582                 case 0xc9: /* fstod */
2583                     gen_helper_fstod(cpu_fpr[rs2]);
2584                     gen_op_store_DT0_fpr(DFPREG(rd));
2585                     break;
2586                 case 0xcb: /* fqtod */
2587                     CHECK_FPU_FEATURE(dc, FLOAT128);
2588                     gen_op_load_fpr_QT1(QFPREG(rs2));
2589                     gen_clear_float_exceptions();
2590                     gen_helper_fqtod();
2591                     gen_helper_check_ieee_exceptions();
2592                     gen_op_store_DT0_fpr(DFPREG(rd));
2593                     break;
2594                 case 0xcc: /* fitoq */
2595                     CHECK_FPU_FEATURE(dc, FLOAT128);
2596                     gen_helper_fitoq(cpu_fpr[rs2]);
2597                     gen_op_store_QT0_fpr(QFPREG(rd));
2598                     break;
2599                 case 0xcd: /* fstoq */
2600                     CHECK_FPU_FEATURE(dc, FLOAT128);
2601                     gen_helper_fstoq(cpu_fpr[rs2]);
2602                     gen_op_store_QT0_fpr(QFPREG(rd));
2603                     break;
2604                 case 0xce: /* fdtoq */
2605                     CHECK_FPU_FEATURE(dc, FLOAT128);
2606                     gen_op_load_fpr_DT1(DFPREG(rs2));
2607                     gen_helper_fdtoq();
2608                     gen_op_store_QT0_fpr(QFPREG(rd));
2609                     break;
2610                 case 0xd1: /* fstoi */
2611                     gen_clear_float_exceptions();
2612                     gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2613                     gen_helper_check_ieee_exceptions();
2614                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2615                     break;
2616                 case 0xd2: /* fdtoi */
2617                     gen_op_load_fpr_DT1(DFPREG(rs2));
2618                     gen_clear_float_exceptions();
2619                     gen_helper_fdtoi(cpu_tmp32);
2620                     gen_helper_check_ieee_exceptions();
2621                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2622                     break;
2623                 case 0xd3: /* fqtoi */
2624                     CHECK_FPU_FEATURE(dc, FLOAT128);
2625                     gen_op_load_fpr_QT1(QFPREG(rs2));
2626                     gen_clear_float_exceptions();
2627                     gen_helper_fqtoi(cpu_tmp32);
2628                     gen_helper_check_ieee_exceptions();
2629                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2630                     break;
2631 #ifdef TARGET_SPARC64
2632                 case 0x2: /* V9 fmovd */
2633                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2634                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2635                                     cpu_fpr[DFPREG(rs2) + 1]);
2636                     break;
2637                 case 0x3: /* V9 fmovq */
2638                     CHECK_FPU_FEATURE(dc, FLOAT128);
2639                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2640                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2641                                     cpu_fpr[QFPREG(rs2) + 1]);
2642                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2643                                     cpu_fpr[QFPREG(rs2) + 2]);
2644                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2645                                     cpu_fpr[QFPREG(rs2) + 3]);
2646                     break;
2647                 case 0x6: /* V9 fnegd */
2648                     gen_op_load_fpr_DT1(DFPREG(rs2));
2649                     gen_helper_fnegd();
2650                     gen_op_store_DT0_fpr(DFPREG(rd));
2651                     break;
2652                 case 0x7: /* V9 fnegq */
2653                     CHECK_FPU_FEATURE(dc, FLOAT128);
2654                     gen_op_load_fpr_QT1(QFPREG(rs2));
2655                     gen_helper_fnegq();
2656                     gen_op_store_QT0_fpr(QFPREG(rd));
2657                     break;
2658                 case 0xa: /* V9 fabsd */
2659                     gen_op_load_fpr_DT1(DFPREG(rs2));
2660                     gen_helper_fabsd();
2661                     gen_op_store_DT0_fpr(DFPREG(rd));
2662                     break;
2663                 case 0xb: /* V9 fabsq */
2664                     CHECK_FPU_FEATURE(dc, FLOAT128);
2665                     gen_op_load_fpr_QT1(QFPREG(rs2));
2666                     gen_helper_fabsq();
2667                     gen_op_store_QT0_fpr(QFPREG(rd));
2668                     break;
2669                 case 0x81: /* V9 fstox */
2670                     gen_clear_float_exceptions();
2671                     gen_helper_fstox(cpu_fpr[rs2]);
2672                     gen_helper_check_ieee_exceptions();
2673                     gen_op_store_DT0_fpr(DFPREG(rd));
2674                     break;
2675                 case 0x82: /* V9 fdtox */
2676                     gen_op_load_fpr_DT1(DFPREG(rs2));
2677                     gen_clear_float_exceptions();
2678                     gen_helper_fdtox();
2679                     gen_helper_check_ieee_exceptions();
2680                     gen_op_store_DT0_fpr(DFPREG(rd));
2681                     break;
2682                 case 0x83: /* V9 fqtox */
2683                     CHECK_FPU_FEATURE(dc, FLOAT128);
2684                     gen_op_load_fpr_QT1(QFPREG(rs2));
2685                     gen_clear_float_exceptions();
2686                     gen_helper_fqtox();
2687                     gen_helper_check_ieee_exceptions();
2688                     gen_op_store_DT0_fpr(DFPREG(rd));
2689                     break;
2690                 case 0x84: /* V9 fxtos */
2691                     gen_op_load_fpr_DT1(DFPREG(rs2));
2692                     gen_clear_float_exceptions();
2693                     gen_helper_fxtos(cpu_tmp32);
2694                     gen_helper_check_ieee_exceptions();
2695                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2696                     break;
2697                 case 0x88: /* V9 fxtod */
2698                     gen_op_load_fpr_DT1(DFPREG(rs2));
2699                     gen_clear_float_exceptions();
2700                     gen_helper_fxtod();
2701                     gen_helper_check_ieee_exceptions();
2702                     gen_op_store_DT0_fpr(DFPREG(rd));
2703                     break;
2704                 case 0x8c: /* V9 fxtoq */
2705                     CHECK_FPU_FEATURE(dc, FLOAT128);
2706                     gen_op_load_fpr_DT1(DFPREG(rs2));
2707                     gen_clear_float_exceptions();
2708                     gen_helper_fxtoq();
2709                     gen_helper_check_ieee_exceptions();
2710                     gen_op_store_QT0_fpr(QFPREG(rd));
2711                     break;
2712 #endif
2713                 default:
2714                     goto illegal_insn;
2715                 }
2716             } else if (xop == 0x35) {   /* FPU Operations */
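                     /* FPop2: conditional FP moves (fmovr/fmovcc) and the
                        fcmp* compares, which set %fcc[rd & 3]. */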
2717 #ifdef TARGET_SPARC64
2718                 int cond;
2719 #endif
2720                 if (gen_trap_ifnofpu(dc, cpu_cond))
2721                     goto jmp_insn;
2722                 gen_op_clear_ieee_excp_and_FTT();
2723                 rs1 = GET_FIELD(insn, 13, 17);
2724                 rs2 = GET_FIELD(insn, 27, 31);
2725                 xop = GET_FIELD(insn, 18, 26);
2726 #ifdef TARGET_SPARC64
2727                 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2728                     int l1;
2729
2730                     l1 = gen_new_label();
2731                     cond = GET_FIELD_SP(insn, 14, 17);
2732                     cpu_src1 = get_src1(insn, cpu_src1);
2733                     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2734                                        0, l1);
2735                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2736                     gen_set_label(l1);
2737                     break;
2738                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2739                     int l1;
2740
2741                     l1 = gen_new_label();
2742                     cond = GET_FIELD_SP(insn, 14, 17);
2743                     cpu_src1 = get_src1(insn, cpu_src1);
2744                     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2745                                        0, l1);
2746                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2747                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2748                     gen_set_label(l1);
2749                     break;
2750                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2751                     int l1;
2752
2753                     CHECK_FPU_FEATURE(dc, FLOAT128);
2754                     l1 = gen_new_label();
2755                     cond = GET_FIELD_SP(insn, 14, 17);
2756                     cpu_src1 = get_src1(insn, cpu_src1);
2757                     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2758                                        0, l1);
2759                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2760                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2761                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2762                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2763                     gen_set_label(l1);
2764                     break;
2765                 }
2766 #endif
2767                 switch (xop) {
2768 #ifdef TARGET_SPARC64
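     /* Conditional FP moves: evaluate the selected %fcc (or %icc/%xcc for the
        variants further below) condition into a temporary and branch over the
        register copy when it is false. */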
2769 #define FMOVSCC(fcc)                                                    \
2770                     {                                                   \
2771                         TCGv r_cond;                                    \
2772                         int l1;                                         \
2773                                                                         \
2774                         l1 = gen_new_label();                           \
2775                         r_cond = tcg_temp_new();                        \
2776                         cond = GET_FIELD_SP(insn, 14, 17);              \
2777                         gen_fcond(r_cond, fcc, cond);                   \
2778                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2779                                            0, l1);                      \
2780                         tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
2781                         gen_set_label(l1);                              \
2782                         tcg_temp_free(r_cond);                          \
2783                     }
2784 #define FMOVDCC(fcc)                                                    \
2785                     {                                                   \
2786                         TCGv r_cond;                                    \
2787                         int l1;                                         \
2788                                                                         \
2789                         l1 = gen_new_label();                           \
2790                         r_cond = tcg_temp_new();                        \
2791                         cond = GET_FIELD_SP(insn, 14, 17);              \
2792                         gen_fcond(r_cond, fcc, cond);                   \
2793                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2794                                            0, l1);                      \
2795                         tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
2796                                         cpu_fpr[DFPREG(rs2)]);          \
2797                         tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
2798                                         cpu_fpr[DFPREG(rs2) + 1]);      \
2799                         gen_set_label(l1);                              \
2800                         tcg_temp_free(r_cond);                          \
2801                     }
2802 #define FMOVQCC(fcc)                                                    \
2803                     {                                                   \
2804                         TCGv r_cond;                                    \
2805                         int l1;                                         \
2806                                                                         \
2807                         l1 = gen_new_label();                           \
2808                         r_cond = tcg_temp_new();                        \
2809                         cond = GET_FIELD_SP(insn, 14, 17);              \
2810                         gen_fcond(r_cond, fcc, cond);                   \
2811                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2812                                            0, l1);                      \
2813                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
2814                                         cpu_fpr[QFPREG(rs2)]);          \
2815                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
2816                                         cpu_fpr[QFPREG(rs2) + 1]);      \
2817                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
2818                                         cpu_fpr[QFPREG(rs2) + 2]);      \
2819                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
2820                                         cpu_fpr[QFPREG(rs2) + 3]);      \
2821                         gen_set_label(l1);                              \
2822                         tcg_temp_free(r_cond);                          \
2823                     }
2824                     case 0x001: /* V9 fmovscc %fcc0 */
2825                         FMOVSCC(0);
2826                         break;
2827                     case 0x002: /* V9 fmovdcc %fcc0 */
2828                         FMOVDCC(0);
2829                         break;
2830                     case 0x003: /* V9 fmovqcc %fcc0 */
2831                         CHECK_FPU_FEATURE(dc, FLOAT128);
2832                         FMOVQCC(0);
2833                         break;
2834                     case 0x041: /* V9 fmovscc %fcc1 */
2835                         FMOVSCC(1);
2836                         break;
2837                     case 0x042: /* V9 fmovdcc %fcc1 */
2838                         FMOVDCC(1);
2839                         break;
2840                     case 0x043: /* V9 fmovqcc %fcc1 */
2841                         CHECK_FPU_FEATURE(dc, FLOAT128);
2842                         FMOVQCC(1);
2843                         break;
2844                     case 0x081: /* V9 fmovscc %fcc2 */
2845                         FMOVSCC(2);
2846                         break;
2847                     case 0x082: /* V9 fmovdcc %fcc2 */
2848                         FMOVDCC(2);
2849                         break;
2850                     case 0x083: /* V9 fmovqcc %fcc2 */
2851                         CHECK_FPU_FEATURE(dc, FLOAT128);
2852                         FMOVQCC(2);
2853                         break;
2854                     case 0x0c1: /* V9 fmovscc %fcc3 */
2855                         FMOVSCC(3);
2856                         break;
2857                     case 0x0c2: /* V9 fmovdcc %fcc3 */
2858                         FMOVDCC(3);
2859                         break;
2860                     case 0x0c3: /* V9 fmovqcc %fcc3 */
2861                         CHECK_FPU_FEATURE(dc, FLOAT128);
2862                         FMOVQCC(3);
2863                         break;
2864 #undef FMOVSCC
2865 #undef FMOVDCC
2866 #undef FMOVQCC
2867 #define FMOVSCC(icc)                                                    \
2868                     {                                                   \
2869                         TCGv r_cond;                                    \
2870                         int l1;                                         \
2871                                                                         \
2872                         l1 = gen_new_label();                           \
2873                         r_cond = tcg_temp_new();                        \
2874                         cond = GET_FIELD_SP(insn, 14, 17);              \
2875                         gen_cond(r_cond, icc, cond, dc);                \
2876                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2877                                            0, l1);                      \
2878                         tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
2879                         gen_set_label(l1);                              \
2880                         tcg_temp_free(r_cond);                          \
2881                     }
2882 #define FMOVDCC(icc)                                                    \
2883                     {                                                   \
2884                         TCGv r_cond;                                    \
2885                         int l1;                                         \
2886                                                                         \
2887                         l1 = gen_new_label();                           \
2888                         r_cond = tcg_temp_new();                        \
2889                         cond = GET_FIELD_SP(insn, 14, 17);              \
2890                         gen_cond(r_cond, icc, cond, dc);                \
2891                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2892                                            0, l1);                      \
2893                         tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
2894                                         cpu_fpr[DFPREG(rs2)]);          \
2895                         tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
2896                                         cpu_fpr[DFPREG(rs2) + 1]);      \
2897                         gen_set_label(l1);                              \
2898                         tcg_temp_free(r_cond);                          \
2899                     }
2900 #define FMOVQCC(icc)                                                    \
2901                     {                                                   \
2902                         TCGv r_cond;                                    \
2903                         int l1;                                         \
2904                                                                         \
2905                         l1 = gen_new_label();                           \
2906                         r_cond = tcg_temp_new();                        \
2907                         cond = GET_FIELD_SP(insn, 14, 17);              \
2908                         gen_cond(r_cond, icc, cond, dc);                \
2909                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2910                                            0, l1);                      \
2911                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
2912                                         cpu_fpr[QFPREG(rs2)]);          \
2913                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
2914                                         cpu_fpr[QFPREG(rs2) + 1]);      \
2915                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
2916                                         cpu_fpr[QFPREG(rs2) + 2]);      \
2917                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
2918                                         cpu_fpr[QFPREG(rs2) + 3]);      \
2919                         gen_set_label(l1);                              \
2920                         tcg_temp_free(r_cond);                          \
2921                     }
2922
2923                     case 0x101: /* V9 fmovscc %icc */
2924                         FMOVSCC(0);
2925                         break;
2926                     case 0x102: /* V9 fmovdcc %icc */
2927                         FMOVDCC(0);
                             break;
2928                     case 0x103: /* V9 fmovqcc %icc */
2929                         CHECK_FPU_FEATURE(dc, FLOAT128);
2930                         FMOVQCC(0);
2931                         break;
2932                     case 0x181: /* V9 fmovscc %xcc */
2933                         FMOVSCC(1);
2934                         break;
2935                     case 0x182: /* V9 fmovdcc %xcc */
2936                         FMOVDCC(1);
2937                         break;
2938                     case 0x183: /* V9 fmovqcc %xcc */
2939                         CHECK_FPU_FEATURE(dc, FLOAT128);
2940                         FMOVQCC(1);
2941                         break;
2942 #undef FMOVSCC
2943 #undef FMOVDCC
2944 #undef FMOVQCC
2945 #endif
2946                     case 0x51: /* fcmps, V9 %fcc */
2947                         gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2948                         break;
2949                     case 0x52: /* fcmpd, V9 %fcc */
2950                         gen_op_load_fpr_DT0(DFPREG(rs1));
2951                         gen_op_load_fpr_DT1(DFPREG(rs2));
2952                         gen_op_fcmpd(rd & 3);
2953                         break;
2954                     case 0x53: /* fcmpq, V9 %fcc */
2955                         CHECK_FPU_FEATURE(dc, FLOAT128);
2956                         gen_op_load_fpr_QT0(QFPREG(rs1));
2957                         gen_op_load_fpr_QT1(QFPREG(rs2));
2958                         gen_op_fcmpq(rd & 3);
2959                         break;
2960                     case 0x55: /* fcmpes, V9 %fcc */
2961                         gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2962                         break;
2963                     case 0x56: /* fcmped, V9 %fcc */
2964                         gen_op_load_fpr_DT0(DFPREG(rs1));
2965                         gen_op_load_fpr_DT1(DFPREG(rs2));
2966                         gen_op_fcmped(rd & 3);
2967                         break;
2968                     case 0x57: /* fcmpeq, V9 %fcc */
2969                         CHECK_FPU_FEATURE(dc, FLOAT128);
2970                         gen_op_load_fpr_QT0(QFPREG(rs1));
2971                         gen_op_load_fpr_QT1(QFPREG(rs2));
2972                         gen_op_fcmpeq(rd & 3);
2973                         break;
2974                     default:
2975                         goto illegal_insn;
2976                 }
2977             } else if (xop == 0x2) {
2978                 // clr/mov shortcut
2979
2980                 rs1 = GET_FIELD(insn, 13, 17);
2981                 if (rs1 == 0) {
2982                     // or %g0, x, y -> mov T0, x; mov y, T0
2983                     if (IS_IMM) {       /* immediate */
2984                         TCGv r_const;
2985
2986                         simm = GET_FIELDs(insn, 19, 31);
2987                         r_const = tcg_const_tl(simm);
2988                         gen_movl_TN_reg(rd, r_const);
2989                         tcg_temp_free(r_const);
2990                     } else {            /* register */
2991                         rs2 = GET_FIELD(insn, 27, 31);
2992                         gen_movl_reg_TN(rs2, cpu_dst);
2993                         gen_movl_TN_reg(rd, cpu_dst);
2994                     }
2995                 } else {
2996                     cpu_src1 = get_src1(insn, cpu_src1);
2997                     if (IS_IMM) {       /* immediate */
2998                         simm = GET_FIELDs(insn, 19, 31);
2999                         tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3000                         gen_movl_TN_reg(rd, cpu_dst);
3001                     } else {            /* register */
3002                         // or x, %g0, rd -> plain move of x into rd
3003                         rs2 = GET_FIELD(insn, 27, 31);
3004                         if (rs2 != 0) {
3005                             gen_movl_reg_TN(rs2, cpu_src2);
3006                             tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3007                             gen_movl_TN_reg(rd, cpu_dst);
3008                         } else
3009                             gen_movl_TN_reg(rd, cpu_src1);
3010                     }
3011                 }
3012 #ifdef TARGET_SPARC64
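                 /* 64-bit shift group: bit 12 of the instruction selects the
                    V9 form (sllx/srlx/srax, 6-bit shift count) rather than the
                    32-bit form (sll/srl/sra, 5-bit count applied to the low 32
                    bits of the source). */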
3013             } else if (xop == 0x25) { /* sll, V9 sllx */
3014                 cpu_src1 = get_src1(insn, cpu_src1);
3015                 if (IS_IMM) {   /* immediate */
3016                     simm = GET_FIELDs(insn, 20, 31);
3017                     if (insn & (1 << 12)) {
3018                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3019                     } else {
3020                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3021                     }
3022                 } else {                /* register */
3023                     rs2 = GET_FIELD(insn, 27, 31);
3024                     gen_movl_reg_TN(rs2, cpu_src2);
3025                     if (insn & (1 << 12)) {
3026                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3027                     } else {
3028                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3029                     }
3030                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3031                 }
3032                 gen_movl_TN_reg(rd, cpu_dst);
3033             } else if (xop == 0x26) { /* srl, V9 srlx */
3034                 cpu_src1 = get_src1(insn, cpu_src1);
3035                 if (IS_IMM) {   /* immediate */
3036                     simm = GET_FIELDs(insn, 20, 31);
3037                     if (insn & (1 << 12)) {
3038                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3039                     } else {
3040                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3041                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3042                     }
3043                 } else {                /* register */
3044                     rs2 = GET_FIELD(insn, 27, 31);
3045                     gen_movl_reg_TN(rs2, cpu_src2);
3046                     if (insn & (1 << 12)) {
3047                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3048                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3049                     } else {
3050                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3051                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3052                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3053                     }
3054                 }
3055                 gen_movl_TN_reg(rd, cpu_dst);
3056             } else if (xop == 0x27) { /* sra, V9 srax */
3057                 cpu_src1 = get_src1(insn, cpu_src1);
3058                 if (IS_IMM) {   /* immediate */
3059                     simm = GET_FIELDs(insn, 20, 31);
3060                     if (insn & (1 << 12)) {
3061                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3062                     } else {
3063                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3064                         tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3065                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3066                     }
3067                 } else {                /* register */
3068                     rs2 = GET_FIELD(insn, 27, 31);
3069                     gen_movl_reg_TN(rs2, cpu_src2);
3070                     if (insn & (1 << 12)) {
3071                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3072                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3073                     } else {
3074                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3075                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3076                         tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3077                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3078                     }
3079                 }
3080                 gen_movl_TN_reg(rd, cpu_dst);
3081 #endif
3082             } else if (xop < 0x36) {
3083                 if (xop < 0x20) {
3084                     cpu_src1 = get_src1(insn, cpu_src1);
3085                     cpu_src2 = get_src2(insn, cpu_src2);
3086                     switch (xop & ~0x10) {
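                         /* Bit 4 of xop selects the cc-setting variant (addcc,
                            subcc, ...), so it is masked off for decoding and
                            tested separately below.  The cc variants either
                            call a gen_op_*_cc helper or just record a CC_OP_*
                            value in cpu_cc_op/dc->cc_op so that the PSR flags
                            can be computed lazily when they are needed. */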
3087                     case 0x0: /* add */
3088                         if (IS_IMM) {
3089                             simm = GET_FIELDs(insn, 19, 31);
3090                             if (xop & 0x10) {
3091                                 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3092                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3093                                 dc->cc_op = CC_OP_ADD;
3094                             } else {
3095                                 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3096                             }
3097                         } else {
3098                             if (xop & 0x10) {
3099                                 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3100                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3101                                 dc->cc_op = CC_OP_ADD;
3102                             } else {
3103                                 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3104                             }
3105                         }
3106                         break;
3107                     case 0x1: /* and */
3108                         if (IS_IMM) {
3109                             simm = GET_FIELDs(insn, 19, 31);
3110                             tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3111                         } else {
3112                             tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3113                         }
3114                         if (xop & 0x10) {
3115                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3116                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3117                             dc->cc_op = CC_OP_LOGIC;
3118                         }
3119                         break;
3120                     case 0x2: /* or */
3121                         if (IS_IMM) {
3122                             simm = GET_FIELDs(insn, 19, 31);
3123                             tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3124                         } else {
3125                             tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3126                         }
3127                         if (xop & 0x10) {
3128                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3129                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3130                             dc->cc_op = CC_OP_LOGIC;
3131                         }
3132                         break;
3133                     case 0x3: /* xor */
3134                         if (IS_IMM) {
3135                             simm = GET_FIELDs(insn, 19, 31);
3136                             tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3137                         } else {
3138                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3139                         }
3140                         if (xop & 0x10) {
3141                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3142                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3143                             dc->cc_op = CC_OP_LOGIC;
3144                         }
3145                         break;
3146                     case 0x4: /* sub */
3147                         if (IS_IMM) {
3148                             simm = GET_FIELDs(insn, 19, 31);
3149                             if (xop & 0x10) {
3150                                 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3151                             } else {
3152                                 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3153                             }
3154                         } else {
3155                             if (xop & 0x10) {
3156                                 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3157                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3158                                 dc->cc_op = CC_OP_SUB;
3159                             } else {
3160                                 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3161                             }
3162                         }
3163                         break;
3164                     case 0x5: /* andn */
3165                         if (IS_IMM) {
3166                             simm = GET_FIELDs(insn, 19, 31);
3167                             tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3168                         } else {
3169                             tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3170                         }
3171                         if (xop & 0x10) {
3172                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3173                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3174                             dc->cc_op = CC_OP_LOGIC;
3175                         }
3176                         break;
3177                     case 0x6: /* orn */
3178                         if (IS_IMM) {
3179                             simm = GET_FIELDs(insn, 19, 31);
3180                             tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3181                         } else {
3182                             tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3183                         }
3184                         if (xop & 0x10) {
3185                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3186                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3187                             dc->cc_op = CC_OP_LOGIC;
3188                         }
3189                         break;
3190                     case 0x7: /* xorn */
3191                         if (IS_IMM) {
3192                             simm = GET_FIELDs(insn, 19, 31);
3193                             tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3194                         } else {
3195                             tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3196                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3197                         }
3198                         if (xop & 0x10) {
3199                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3200                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3201                             dc->cc_op = CC_OP_LOGIC;
3202                         }
3203                         break;
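                         /* addx/subx consume the carry flag, so the lazily
                            evaluated condition codes are forced into cpu_psr
                            with gen_helper_compute_psr() before gen_mov_reg_C()
                            extracts the C bit. */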
3204                     case 0x8: /* addx, V9 addc */
3205                         if (IS_IMM) {
3206                             simm = GET_FIELDs(insn, 19, 31);
3207                             if (xop & 0x10) {
3208                                 gen_helper_compute_psr();
3209                                 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
3210                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
3211                                 dc->cc_op = CC_OP_ADDX;
3212                             } else {
3213                                 gen_helper_compute_psr();
3214                                 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3215                                 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3216                                 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3217                             }
3218                         } else {
3219                             if (xop & 0x10) {
3220                                 gen_helper_compute_psr();
3221                                 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3222                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
3223                                 dc->cc_op = CC_OP_ADDX;
3224                             } else {
3225                                 gen_helper_compute_psr();
3226                                 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3227                                 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3228                                 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3229                             }
3230                         }
3231                         break;
3232 #ifdef TARGET_SPARC64
3233                     case 0x9: /* V9 mulx */
3234                         if (IS_IMM) {
3235                             simm = GET_FIELDs(insn, 19, 31);
3236                             tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3237                         } else {
3238                             tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3239                         }
3240                         break;
3241 #endif
3242                     case 0xa: /* umul */
3243                         CHECK_IU_FEATURE(dc, MUL);
3244                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3245                         if (xop & 0x10) {
3246                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3247                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3248                             dc->cc_op = CC_OP_LOGIC;
3249                         }
3250                         break;
3251                     case 0xb: /* smul */
3252                         CHECK_IU_FEATURE(dc, MUL);
3253                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3254                         if (xop & 0x10) {
3255                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3256                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3257                             dc->cc_op = CC_OP_LOGIC;
3258                         }
3259                         break;
3260                     case 0xc: /* subx, V9 subc */
3261                         if (IS_IMM) {
3262                             simm = GET_FIELDs(insn, 19, 31);
3263                             if (xop & 0x10) {
3264                                 gen_helper_compute_psr();
3265                                 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3266                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3267                                 dc->cc_op = CC_OP_FLAGS;
3268                             } else {
3269                                 gen_helper_compute_psr();
3270                                 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3271                                 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3272                                 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3273                             }
3274                         } else {
3275                             if (xop & 0x10) {
3276                                 gen_helper_compute_psr();
3277                                 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3278                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3279                                 dc->cc_op = CC_OP_FLAGS;
3280                             } else {
3281                                 gen_helper_compute_psr();
3282                                 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3283                                 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3284                                 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3285                             }
3286                         }
3287                         break;
3288 #ifdef TARGET_SPARC64
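                         /* udivx: the divisor is checked first so that a zero
                            divisor raises the division trap before the 64-bit
                            unsigned divide is emitted. */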
3289                     case 0xd: /* V9 udivx */
3290                         tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3291                         tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3292                         gen_trap_ifdivzero_tl(cpu_cc_src2);
3293                         tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3294                         break;
3295 #endif
3296                     case 0xe: /* udiv */
3297                         CHECK_IU_FEATURE(dc, DIV);
3298                         gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3299                         if (xop & 0x10) {
3300                             gen_op_div_cc(cpu_dst);
3301                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3302                             dc->cc_op = CC_OP_FLAGS;
3303                         }
3304                         break;
3305                     case 0xf: /* sdiv */
3306                         CHECK_IU_FEATURE(dc, DIV);
3307                         gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3308                         if (xop & 0x10) {
3309                             gen_op_div_cc(cpu_dst);
3310                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3311                             dc->cc_op = CC_OP_FLAGS;
3312                         }
3313                         break;
3314                     default:
3315                         goto illegal_insn;
3316                     }
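                         /* common write-back of the result for the whole
                            xop < 0x20 group */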
3317                     gen_movl_TN_reg(rd, cpu_dst);
3318                 } else {
3319                     cpu_src1 = get_src1(insn, cpu_src1);
3320                     cpu_src2 = get_src2(insn, cpu_src2);
3321                     switch (xop) {
3322                     case 0x20: /* taddcc */
3323                         gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3324                         gen_movl_TN_reg(rd, cpu_dst);
3325                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3326                         dc->cc_op = CC_OP_FLAGS;
3327                         break;
3328                     case 0x21: /* tsubcc */
3329                         gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3330                         gen_movl_TN_reg(rd, cpu_dst);
3331                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3332                         dc->cc_op = CC_OP_FLAGS;
3333                         break;
3334                     case 0x22: /* taddcctv */
3335                         save_state(dc, cpu_cond);
3336                         gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3337                         gen_movl_TN_reg(rd, cpu_dst);
3338                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3339                         dc->cc_op = CC_OP_FLAGS;
3340                         break;
3341                     case 0x23: /* tsubcctv */
3342                         save_state(dc, cpu_cond);
3343                         gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3344                         gen_movl_TN_reg(rd, cpu_dst);
3345                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3346                         dc->cc_op = CC_OP_FLAGS;
3347                         break;
3348                     case 0x24: /* mulscc */
3349                         gen_helper_compute_psr();
3350                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3351                         gen_movl_TN_reg(rd, cpu_dst);
3352                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3353                         dc->cc_op = CC_OP_FLAGS;
3354                         break;
3355 #ifndef TARGET_SPARC64
3356                     case 0x25:  /* sll */
3357                         if (IS_IMM) { /* immediate */
3358                             simm = GET_FIELDs(insn, 20, 31);
3359                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3360                         } else { /* register */
3361                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3362                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3363                         }
3364                         gen_movl_TN_reg(rd, cpu_dst);
3365                         break;
3366                     case 0x26:  /* srl */
3367                         if (IS_IMM) { /* immediate */
3368                             simm = GET_FIELDs(insn, 20, 31);
3369                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3370                         } else { /* register */
3371                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3372                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3373                         }
3374                         gen_movl_TN_reg(rd, cpu_dst);
3375                         break;
3376                     case 0x27:  /* sra */
3377                         if (IS_IMM) { /* immediate */
3378                             simm = GET_FIELDs(insn, 20, 31);
3379                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3380                         } else { /* register */
3381                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3382                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3383                         }
3384                         gen_movl_TN_reg(rd, cpu_dst);
3385                         break;
3386 #endif
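                         /* wr %y / wr %asr: rd selects the ancillary state
                            register.  Per the architecture the value written is
                            rs1 XOR rs2 (or rs1 XOR the sign-extended
                            immediate), hence the tcg_gen_xor_tl() in each case
                            below. */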
3387                     case 0x30:
3388                         {
3389                             switch(rd) {
3390                             case 0: /* wry */
3391                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3392                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3393                                 break;
3394 #ifndef TARGET_SPARC64
3395                             case 0x01 ... 0x0f: /* undefined in the
3396                                                    SPARCv8 manual, nop
3397                                                    on the microSPARC
3398                                                    II */
3399                             case 0x10 ... 0x1f: /* implementation-dependent
3400                                                    in the SPARCv8
3401                                                    manual, nop on the
3402                                                    microSPARC II */
3403                                 break;
3404 #else
3405                             case 0x2: /* V9 wrccr */
3406                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3407                                 gen_helper_wrccr(cpu_dst);
3408                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3409                                 dc->cc_op = CC_OP_FLAGS;
3410                                 break;
3411                             case 0x3: /* V9 wrasi */
3412                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3413                                 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3414                                 break;
3415                             case 0x6: /* V9 wrfprs */
3416                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3417                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3418                                 save_state(dc, cpu_cond);
3419                                 gen_op_next_insn();
3420                                 tcg_gen_exit_tb(0);
3421                                 dc->is_br = 1;
3422                                 break;
3423                             case 0xf: /* V9 sir, nop if user */
3424 #if !defined(CONFIG_USER_ONLY)
3425                                 if (supervisor(dc))
3426                                     ; // XXX
3427 #endif
3428                                 break;
3429                             case 0x13: /* Graphics Status */
3430                                 if (gen_trap_ifnofpu(dc, cpu_cond))
3431                                     goto jmp_insn;
3432                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3433                                 break;
3434                             case 0x14: /* Softint set */
3435                                 if (!supervisor(dc))
3436                                     goto illegal_insn;
3437                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3438                                 gen_helper_set_softint(cpu_tmp64);
3439                                 break;
3440                             case 0x15: /* Softint clear */
3441                                 if (!supervisor(dc))
3442                                     goto illegal_insn;
3443                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3444                                 gen_helper_clear_softint(cpu_tmp64);
3445                                 break;
3446                             case 0x16: /* Softint write */
3447                                 if (!supervisor(dc))
3448                                     goto illegal_insn;
3449                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3450                                 gen_helper_write_softint(cpu_tmp64);
3451                                 break;
3452                             case 0x17: /* Tick compare */
3453 #if !defined(CONFIG_USER_ONLY)
3454                                 if (!supervisor(dc))
3455                                     goto illegal_insn;
3456 #endif
3457                                 {
3458                                     TCGv_ptr r_tickptr;
3459
3460                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3461                                                    cpu_src2);
3462                                     r_tickptr = tcg_temp_new_ptr();
3463                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3464                                                    offsetof(CPUState, tick));
3465                                     gen_helper_tick_set_limit(r_tickptr,
3466                                                               cpu_tick_cmpr);
3467                                     tcg_temp_free_ptr(r_tickptr);
3468                                 }
3469                                 break;
3470                             case 0x18: /* System tick */
3471 #if !defined(CONFIG_USER_ONLY)
3472                                 if (!supervisor(dc))
3473                                     goto illegal_insn;
3474 #endif
3475                                 {
3476                                     TCGv_ptr r_tickptr;
3477
3478                                     tcg_gen_xor_tl(cpu_dst, cpu_src1,
3479                                                    cpu_src2);
3480                                     r_tickptr = tcg_temp_new_ptr();
3481                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3482                                                    offsetof(CPUState, stick));
3483                                     gen_helper_tick_set_count(r_tickptr,
3484                                                               cpu_dst);
3485                                     tcg_temp_free_ptr(r_tickptr);
3486                                 }
3487                                 break;
3488                             case 0x19: /* System tick compare */
3489 #if !defined(CONFIG_USER_ONLY)
3490                                 if (!supervisor(dc))
3491                                     goto illegal_insn;
3492 #endif
3493                                 {
3494                                     TCGv_ptr r_tickptr;
3495
3496                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3497                                                    cpu_src2);
3498                                     r_tickptr = tcg_temp_new_ptr();
3499                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3500                                                    offsetof(CPUState, stick));
3501                                     gen_helper_tick_set_limit(r_tickptr,
3502                                                               cpu_stick_cmpr);
3503                                     tcg_temp_free_ptr(r_tickptr);
3504                                 }
3505                                 break;
3506
3507                             case 0x10: /* Performance Control */
3508                             case 0x11: /* Performance Instrumentation
3509                                           Counter */
3510                             case 0x12: /* Dispatch Control */
3511 #endif
3512                             default:
3513                                 goto illegal_insn;
3514                             }
3515                         }
3516                         break;
3517 #if !defined(CONFIG_USER_ONLY)
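                         /* case 0x31 is wrpsr on 32-bit targets and the V9
                            saved/restored window hints on sparc64; both are
                            privileged.  The wrpsr path ends the translation
                            block after the helper runs, since the new PSR may
                            change state the translator relies on (e.g. the FPU
                            enable bit). */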
3518                     case 0x31: /* wrpsr, V9 saved, restored */
3519                         {
3520                             if (!supervisor(dc))
3521                                 goto priv_insn;
3522 #ifdef TARGET_SPARC64
3523                             switch (rd) {
3524                             case 0:
3525                                 gen_helper_saved();
3526                                 break;
3527                             case 1:
3528                                 gen_helper_restored();
3529                                 break;
3530                             case 2: /* UA2005 allclean */
3531                             case 3: /* UA2005 otherw */
3532                             case 4: /* UA2005 normalw */
3533                             case 5: /* UA2005 invalw */
3534                                 // XXX
3535                             default:
3536                                 goto illegal_insn;
3537                             }
3538 #else
3539                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3540                             gen_helper_wrpsr(cpu_dst);
3541                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3542                             dc->cc_op = CC_OP_FLAGS;
3543                             save_state(dc, cpu_cond);
3544                             gen_op_next_insn();
3545                             tcg_gen_exit_tb(0);
3546                             dc->is_br = 1;
3547 #endif
3548                         }
3549                         break;
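                         /* wrwim (V9: wrpr): on sparc64 rd selects the
                            privileged register (tpc, tnpc, tstate, tt, tick,
                            tba, pstate, tl, pil, cwp and the window-management
                            registers); on 32-bit targets the value is masked to
                            the implemented number of register windows before it
                            is stored in %wim. */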
3550                     case 0x32: /* wrwim, V9 wrpr */
3551                         {
3552                             if (!supervisor(dc))
3553                                 goto priv_insn;
3554                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3555 #ifdef TARGET_SPARC64
3556                             switch (rd) {
3557                             case 0: // tpc
3558                                 {
3559                                     TCGv_ptr r_tsptr;
3560
3561                                     r_tsptr = tcg_temp_new_ptr();
3562                                     tcg_gen_ld_ptr(r_tsptr, cpu_env,
3563                                                    offsetof(CPUState, tsptr));
3564                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3565                                                   offsetof(trap_state, tpc));
3566                                     tcg_temp_free_ptr(r_tsptr);
3567                                 }
3568                                 break;
3569                             case 1: // tnpc
3570                                 {
3571                                     TCGv_ptr r_tsptr;
3572
3573                                     r_tsptr = tcg_temp_new_ptr();
3574                                     tcg_gen_ld_ptr(r_tsptr, cpu_env,
3575                                                    offsetof(CPUState, tsptr));
3576                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3577                                                   offsetof(trap_state, tnpc));
3578                                     tcg_temp_free_ptr(r_tsptr);
3579                                 }
3580                                 break;
3581                             case 2: // tstate
3582                                 {
3583                                     TCGv_ptr r_tsptr;
3584
3585                                     r_tsptr = tcg_temp_new_ptr();
3586                                     tcg_gen_ld_ptr(r_tsptr, cpu_env,
3587                                                    offsetof(CPUState, tsptr));
3588                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3589                                                   offsetof(trap_state,
3590                                                            tstate));
3591                                     tcg_temp_free_ptr(r_tsptr);
3592                                 }
3593                                 break;
3594                             case 3: // tt
3595                                 {
3596                                     TCGv_ptr r_tsptr;
3597
3598                                     r_tsptr = tcg_temp_new_ptr();
3599                                     tcg_gen_ld_ptr(r_tsptr, cpu_env,
3600                                                    offsetof(CPUState, tsptr));
3601                                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3602                                     tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3603                                                    offsetof(trap_state, tt));
3604                                     tcg_temp_free_ptr(r_tsptr);
3605                                 }
3606                                 break;
3607                             case 4: // tick
3608                                 {
3609                                     TCGv_ptr r_tickptr;
3610
3611                                     r_tickptr = tcg_temp_new_ptr();
3612                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3613                                                    offsetof(CPUState, tick));
3614                                     gen_helper_tick_set_count(r_tickptr,
3615                                                               cpu_tmp0);
3616                                     tcg_temp_free_ptr(r_tickptr);
3617                                 }
3618                                 break;
3619                             case 5: // tba
3620                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3621                                 break;
3622                             case 6: // pstate
3623                                 save_state(dc, cpu_cond);
3624                                 gen_helper_wrpstate(cpu_tmp0);
3625                                 gen_op_next_insn();
3626                                 tcg_gen_exit_tb(0);
3627                                 dc->is_br = 1;
3628                                 break;
3629                             case 7: // tl
3630                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3631                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3632                                                offsetof(CPUSPARCState, tl));
3633                                 break;
3634                             case 8: // pil
3635                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3636                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3637                                                offsetof(CPUSPARCState,
3638                                                         psrpil));
3639                                 break;
3640                             case 9: // cwp
3641                                 gen_helper_wrcwp(cpu_tmp0);
3642                                 break;
3643                             case 10: // cansave
3644                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3645                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3646                                                offsetof(CPUSPARCState,
3647                                                         cansave));
3648                                 break;
3649                             case 11: // canrestore
3650                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3651                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3652                                                offsetof(CPUSPARCState,
3653                                                         canrestore));
3654                                 break;
3655                             case 12: // cleanwin
3656                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3657                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3658                                                offsetof(CPUSPARCState,
3659                                                         cleanwin));
3660                                 break;
3661                             case 13: // otherwin
3662                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3663                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3664                                                offsetof(CPUSPARCState,
3665                                                         otherwin));
3666                                 break;
3667                             case 14: // wstate
3668                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3669                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3670                                                offsetof(CPUSPARCState,
3671                                                         wstate));
3672                                 break;
3673                             case 16: // UA2005 gl
3674                                 CHECK_IU_FEATURE(dc, GL);
3675                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3676                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3677                                                offsetof(CPUSPARCState, gl));
3678                                 break;
3679                             case 26: // UA2005 strand status
3680                                 CHECK_IU_FEATURE(dc, HYPV);
3681                                 if (!hypervisor(dc))
3682                                     goto priv_insn;
3683                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3684                                 break;
3685                             default:
3686                                 goto illegal_insn;
3687                             }
3688 #else
3689                             tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3690                             if (dc->def->nwindows != 32)
3691                                 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3692                                                 (1 << dc->def->nwindows) - 1);
3693                             tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3694 #endif
3695                         }
3696                         break;
3697                     case 0x33: /* wrtbr, UA2005 wrhpr */
3698                         {
3699 #ifndef TARGET_SPARC64
3700                             if (!supervisor(dc))
3701                                 goto priv_insn;
3702                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3703 #else
3704                             CHECK_IU_FEATURE(dc, HYPV);
3705                             if (!hypervisor(dc))
3706                                 goto priv_insn;
3707                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3708                             switch (rd) {
3709                             case 0: // hpstate
3710                                 // XXX gen_op_wrhpstate();
3711                                 save_state(dc, cpu_cond);
3712                                 gen_op_next_insn();
3713                                 tcg_gen_exit_tb(0);
3714                                 dc->is_br = 1;
3715                                 break;
3716                             case 1: // htstate
3717                                 // XXX gen_op_wrhtstate();
3718                                 break;
3719                             case 3: // hintp
3720                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3721                                 break;
3722                             case 5: // htba
3723                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3724                                 break;
3725                             case 31: // hstick_cmpr
3726                                 {
3727                                     TCGv_ptr r_tickptr;
3728
3729                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3730                                     r_tickptr = tcg_temp_new_ptr();
3731                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3732                                                    offsetof(CPUState, hstick));
3733                                     gen_helper_tick_set_limit(r_tickptr,
3734                                                               cpu_hstick_cmpr);
3735                                     tcg_temp_free_ptr(r_tickptr);
3736                                 }
3737                                 break;
3738                             case 6: // hver readonly
3739                             default:
3740                                 goto illegal_insn;
3741                             }
3742 #endif
3743                         }
3744                         break;
3745 #endif
3746 #ifdef TARGET_SPARC64
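                         /* movcc: the condition is evaluated into r_cond and a
                            branch skips the move when it is false; otherwise
                            the immediate or register operand is copied into rd.
                            Bit 18 selects an integer condition (icc or xcc,
                            chosen by the cc field) instead of a floating-point
                            %fcc condition. */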
3747                     case 0x2c: /* V9 movcc */
3748                         {
3749                             int cc = GET_FIELD_SP(insn, 11, 12);
3750                             int cond = GET_FIELD_SP(insn, 14, 17);
3751                             TCGv r_cond;
3752                             int l1;
3753
3754                             r_cond = tcg_temp_new();
3755                             if (insn & (1 << 18)) {
3756                                 if (cc == 0)
3757                                     gen_cond(r_cond, 0, cond, dc);
3758                                 else if (cc == 2)
3759                                     gen_cond(r_cond, 1, cond, dc);
3760                                 else
3761                                     goto illegal_insn;
3762                             } else {
3763                                 gen_fcond(r_cond, cc, cond);
3764                             }
3765
3766                             l1 = gen_new_label();
3767
3768                             tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3769                             if (IS_IMM) {       /* immediate */
3770                                 TCGv r_const;
3771
3772                                 simm = GET_FIELD_SPs(insn, 0, 10);
3773                                 r_const = tcg_const_tl(simm);
3774                                 gen_movl_TN_reg(rd, r_const);
3775                                 tcg_temp_free(r_const);
3776                             } else {
3777                                 rs2 = GET_FIELD_SP(insn, 0, 4);
3778                                 gen_movl_reg_TN(rs2, cpu_tmp0);
3779                                 gen_movl_TN_reg(rd, cpu_tmp0);
3780                             }
3781                             gen_set_label(l1);
3782                             tcg_temp_free(r_cond);
3783                             break;
3784                         }
3785                     case 0x2d: /* V9 sdivx */
3786                         gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3787                         gen_movl_TN_reg(rd, cpu_dst);
3788                         break;
3789                     case 0x2e: /* V9 popc */
3790                         {
3791                             cpu_src2 = get_src2(insn, cpu_src2);
3792                             gen_helper_popc(cpu_dst, cpu_src2);
3793                             gen_movl_TN_reg(rd, cpu_dst);
                                 break;      /* do not fall through into movr */
3794                         }
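                         /* movr: conditional move on the contents of a
                            register.  gen_tcg_cond_reg[] maps the rcond field
                            to a TCG comparison against zero, and the move is
                            skipped when it fails, in the same branch-over-move
                            style as movcc above. */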
3795                     case 0x2f: /* V9 movr */
3796                         {
3797                             int cond = GET_FIELD_SP(insn, 10, 12);
3798                             int l1;
3799
3800                             cpu_src1 = get_src1(insn, cpu_src1);
3801
3802                             l1 = gen_new_label();
3803
3804                             tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3805                                               cpu_src1, 0, l1);
3806                             if (IS_IMM) {       /* immediate */
3807                                 TCGv r_const;
3808
3809                                 simm = GET_FIELD_SPs(insn, 0, 9);
3810                                 r_const = tcg_const_tl(simm);
3811                                 gen_movl_TN_reg(rd, r_const);
3812                                 tcg_temp_free(r_const);
3813                             } else {
3814                                 rs2 = GET_FIELD_SP(insn, 0, 4);
3815                                 gen_movl_reg_TN(rs2, cpu_tmp0);
3816                                 gen_movl_TN_reg(rd, cpu_tmp0);
3817                             }
3818                             gen_set_label(l1);
3819                             break;
3820                         }
3821 #endif
3822                     default:
3823                         goto illegal_insn;
3824                     }
3825                 }
3826             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3827 #ifdef TARGET_SPARC64
3828                 int opf = GET_FIELD_SP(insn, 5, 13);
3829                 rs1 = GET_FIELD(insn, 13, 17);
3830                 rs2 = GET_FIELD(insn, 27, 31);
3831                 if (gen_trap_ifnofpu(dc, cpu_cond))
3832                     goto jmp_insn;
3833
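                     /* VIS ops: opf (bits 5-13) selects the operation.
                        Double-word operands are staged through the DT0/DT1
                        temporaries with gen_op_load_fpr_DT0/DT1 and results
                        come back via gen_op_store_DT0_fpr, while the
                        single-precision "s" forms work directly on the
                        cpu_fpr[] globals. */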
3834                 switch (opf) {
3835                 case 0x000: /* VIS I edge8cc */
3836                 case 0x001: /* VIS II edge8n */
3837                 case 0x002: /* VIS I edge8lcc */
3838                 case 0x003: /* VIS II edge8ln */
3839                 case 0x004: /* VIS I edge16cc */
3840                 case 0x005: /* VIS II edge16n */
3841                 case 0x006: /* VIS I edge16lcc */
3842                 case 0x007: /* VIS II edge16ln */
3843                 case 0x008: /* VIS I edge32cc */
3844                 case 0x009: /* VIS II edge32n */
3845                 case 0x00a: /* VIS I edge32lcc */
3846                 case 0x00b: /* VIS II edge32ln */
3847                     // XXX
3848                     goto illegal_insn;
3849                 case 0x010: /* VIS I array8 */
3850                     CHECK_FPU_FEATURE(dc, VIS1);
3851                     cpu_src1 = get_src1(insn, cpu_src1);
3852                     gen_movl_reg_TN(rs2, cpu_src2);
3853                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3854                     gen_movl_TN_reg(rd, cpu_dst);
3855                     break;
3856                 case 0x012: /* VIS I array16 */
3857                     CHECK_FPU_FEATURE(dc, VIS1);
3858                     cpu_src1 = get_src1(insn, cpu_src1);
3859                     gen_movl_reg_TN(rs2, cpu_src2);
3860                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3861                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3862                     gen_movl_TN_reg(rd, cpu_dst);
3863                     break;
3864                 case 0x014: /* VIS I array32 */
3865                     CHECK_FPU_FEATURE(dc, VIS1);
3866                     cpu_src1 = get_src1(insn, cpu_src1);
3867                     gen_movl_reg_TN(rs2, cpu_src2);
3868                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3869                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3870                     gen_movl_TN_reg(rd, cpu_dst);
3871                     break;
3872                 case 0x018: /* VIS I alignaddr */
3873                     CHECK_FPU_FEATURE(dc, VIS1);
3874                     cpu_src1 = get_src1(insn, cpu_src1);
3875                     gen_movl_reg_TN(rs2, cpu_src2);
3876                     gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3877                     gen_movl_TN_reg(rd, cpu_dst);
3878                     break;
3879                 case 0x019: /* VIS II bmask */
3880                 case 0x01a: /* VIS I alignaddrl */
3881                     // XXX
3882                     goto illegal_insn;
3883                 case 0x020: /* VIS I fcmple16 */
3884                     CHECK_FPU_FEATURE(dc, VIS1);
3885                     gen_op_load_fpr_DT0(DFPREG(rs1));
3886                     gen_op_load_fpr_DT1(DFPREG(rs2));
3887                     gen_helper_fcmple16();
3888                     gen_op_store_DT0_fpr(DFPREG(rd));
3889                     break;
3890                 case 0x022: /* VIS I fcmpne16 */
3891                     CHECK_FPU_FEATURE(dc, VIS1);
3892                     gen_op_load_fpr_DT0(DFPREG(rs1));
3893                     gen_op_load_fpr_DT1(DFPREG(rs2));
3894                     gen_helper_fcmpne16();
3895                     gen_op_store_DT0_fpr(DFPREG(rd));
3896                     break;
3897                 case 0x024: /* VIS I fcmple32 */
3898                     CHECK_FPU_FEATURE(dc, VIS1);
3899                     gen_op_load_fpr_DT0(DFPREG(rs1));
3900                     gen_op_load_fpr_DT1(DFPREG(rs2));
3901                     gen_helper_fcmple32();
3902                     gen_op_store_DT0_fpr(DFPREG(rd));
3903                     break;
3904                 case 0x026: /* VIS I fcmpne32 */
3905                     CHECK_FPU_FEATURE(dc, VIS1);
3906                     gen_op_load_fpr_DT0(DFPREG(rs1));
3907                     gen_op_load_fpr_DT1(DFPREG(rs2));
3908                     gen_helper_fcmpne32();
3909                     gen_op_store_DT0_fpr(DFPREG(rd));
3910                     break;
3911                 case 0x028: /* VIS I fcmpgt16 */
3912                     CHECK_FPU_FEATURE(dc, VIS1);
3913                     gen_op_load_fpr_DT0(DFPREG(rs1));
3914                     gen_op_load_fpr_DT1(DFPREG(rs2));
3915                     gen_helper_fcmpgt16();
3916                     gen_op_store_DT0_fpr(DFPREG(rd));
3917                     break;
3918                 case 0x02a: /* VIS I fcmpeq16 */
3919                     CHECK_FPU_FEATURE(dc, VIS1);
3920                     gen_op_load_fpr_DT0(DFPREG(rs1));
3921                     gen_op_load_fpr_DT1(DFPREG(rs2));
3922                     gen_helper_fcmpeq16();
3923                     gen_op_store_DT0_fpr(DFPREG(rd));
3924                     break;
3925                 case 0x02c: /* VIS I fcmpgt32 */
3926                     CHECK_FPU_FEATURE(dc, VIS1);
3927                     gen_op_load_fpr_DT0(DFPREG(rs1));
3928                     gen_op_load_fpr_DT1(DFPREG(rs2));
3929                     gen_helper_fcmpgt32();
3930                     gen_op_store_DT0_fpr(DFPREG(rd));
3931                     break;
3932                 case 0x02e: /* VIS I fcmpeq32 */
3933                     CHECK_FPU_FEATURE(dc, VIS1);
3934                     gen_op_load_fpr_DT0(DFPREG(rs1));
3935                     gen_op_load_fpr_DT1(DFPREG(rs2));
3936                     gen_helper_fcmpeq32();
3937                     gen_op_store_DT0_fpr(DFPREG(rd));
3938                     break;
3939                 case 0x031: /* VIS I fmul8x16 */
3940                     CHECK_FPU_FEATURE(dc, VIS1);
3941                     gen_op_load_fpr_DT0(DFPREG(rs1));
3942                     gen_op_load_fpr_DT1(DFPREG(rs2));
3943                     gen_helper_fmul8x16();
3944                     gen_op_store_DT0_fpr(DFPREG(rd));
3945                     break;
3946                 case 0x033: /* VIS I fmul8x16au */
3947                     CHECK_FPU_FEATURE(dc, VIS1);
3948                     gen_op_load_fpr_DT0(DFPREG(rs1));
3949                     gen_op_load_fpr_DT1(DFPREG(rs2));
3950                     gen_helper_fmul8x16au();
3951                     gen_op_store_DT0_fpr(DFPREG(rd));
3952                     break;
3953                 case 0x035: /* VIS I fmul8x16al */
3954                     CHECK_FPU_FEATURE(dc, VIS1);
3955                     gen_op_load_fpr_DT0(DFPREG(rs1));
3956                     gen_op_load_fpr_DT1(DFPREG(rs2));
3957                     gen_helper_fmul8x16al();
3958                     gen_op_store_DT0_fpr(DFPREG(rd));
3959                     break;
3960                 case 0x036: /* VIS I fmul8sux16 */
3961                     CHECK_FPU_FEATURE(dc, VIS1);
3962                     gen_op_load_fpr_DT0(DFPREG(rs1));
3963                     gen_op_load_fpr_DT1(DFPREG(rs2));
3964                     gen_helper_fmul8sux16();
3965                     gen_op_store_DT0_fpr(DFPREG(rd));
3966                     break;
3967                 case 0x037: /* VIS I fmul8ulx16 */
3968                     CHECK_FPU_FEATURE(dc, VIS1);
3969                     gen_op_load_fpr_DT0(DFPREG(rs1));
3970                     gen_op_load_fpr_DT1(DFPREG(rs2));
3971                     gen_helper_fmul8ulx16();
3972                     gen_op_store_DT0_fpr(DFPREG(rd));
3973                     break;
3974                 case 0x038: /* VIS I fmuld8sux16 */
3975                     CHECK_FPU_FEATURE(dc, VIS1);
3976                     gen_op_load_fpr_DT0(DFPREG(rs1));
3977                     gen_op_load_fpr_DT1(DFPREG(rs2));
3978                     gen_helper_fmuld8sux16();
3979                     gen_op_store_DT0_fpr(DFPREG(rd));
3980                     break;
3981                 case 0x039: /* VIS I fmuld8ulx16 */
3982                     CHECK_FPU_FEATURE(dc, VIS1);
3983                     gen_op_load_fpr_DT0(DFPREG(rs1));
3984                     gen_op_load_fpr_DT1(DFPREG(rs2));
3985                     gen_helper_fmuld8ulx16();
3986                     gen_op_store_DT0_fpr(DFPREG(rd));
3987                     break;
3988                 case 0x03a: /* VIS I fpack32 */
3989                 case 0x03b: /* VIS I fpack16 */
3990                 case 0x03d: /* VIS I fpackfix */
3991                 case 0x03e: /* VIS I pdist */
3992                     // XXX
3993                     goto illegal_insn;
3994                 case 0x048: /* VIS I faligndata */
3995                     CHECK_FPU_FEATURE(dc, VIS1);
3996                     gen_op_load_fpr_DT0(DFPREG(rs1));
3997                     gen_op_load_fpr_DT1(DFPREG(rs2));
3998                     gen_helper_faligndata();
3999                     gen_op_store_DT0_fpr(DFPREG(rd));
4000                     break;
4001                 case 0x04b: /* VIS I fpmerge */
4002                     CHECK_FPU_FEATURE(dc, VIS1);
4003                     gen_op_load_fpr_DT0(DFPREG(rs1));
4004                     gen_op_load_fpr_DT1(DFPREG(rs2));
4005                     gen_helper_fpmerge();
4006                     gen_op_store_DT0_fpr(DFPREG(rd));
4007                     break;
4008                 case 0x04c: /* VIS II bshuffle */
4009                     // XXX
4010                     goto illegal_insn;
4011                 case 0x04d: /* VIS I fexpand */
4012                     CHECK_FPU_FEATURE(dc, VIS1);
4013                     gen_op_load_fpr_DT0(DFPREG(rs1));
4014                     gen_op_load_fpr_DT1(DFPREG(rs2));
4015                     gen_helper_fexpand();
4016                     gen_op_store_DT0_fpr(DFPREG(rd));
4017                     break;
4018                 case 0x050: /* VIS I fpadd16 */
4019                     CHECK_FPU_FEATURE(dc, VIS1);
4020                     gen_op_load_fpr_DT0(DFPREG(rs1));
4021                     gen_op_load_fpr_DT1(DFPREG(rs2));
4022                     gen_helper_fpadd16();
4023                     gen_op_store_DT0_fpr(DFPREG(rd));
4024                     break;
4025                 case 0x051: /* VIS I fpadd16s */
4026                     CHECK_FPU_FEATURE(dc, VIS1);
4027                     gen_helper_fpadd16s(cpu_fpr[rd],
4028                                         cpu_fpr[rs1], cpu_fpr[rs2]);
4029                     break;
4030                 case 0x052: /* VIS I fpadd32 */
4031                     CHECK_FPU_FEATURE(dc, VIS1);
4032                     gen_op_load_fpr_DT0(DFPREG(rs1));
4033                     gen_op_load_fpr_DT1(DFPREG(rs2));
4034                     gen_helper_fpadd32();
4035                     gen_op_store_DT0_fpr(DFPREG(rd));
4036                     break;
4037                 case 0x053: /* VIS I fpadd32s */
4038                     CHECK_FPU_FEATURE(dc, VIS1);
4039                     gen_helper_fpadd32s(cpu_fpr[rd],
4040                                         cpu_fpr[rs1], cpu_fpr[rs2]);
4041                     break;
4042                 case 0x054: /* VIS I fpsub16 */
4043                     CHECK_FPU_FEATURE(dc, VIS1);
4044                     gen_op_load_fpr_DT0(DFPREG(rs1));
4045                     gen_op_load_fpr_DT1(DFPREG(rs2));
4046                     gen_helper_fpsub16();
4047                     gen_op_store_DT0_fpr(DFPREG(rd));
4048                     break;
4049                 case 0x055: /* VIS I fpsub16s */
4050                     CHECK_FPU_FEATURE(dc, VIS1);
4051                     gen_helper_fpsub16s(cpu_fpr[rd],
4052                                         cpu_fpr[rs1], cpu_fpr[rs2]);
4053                     break;
4054                 case 0x056: /* VIS I fpsub32 */
4055                     CHECK_FPU_FEATURE(dc, VIS1);
4056                     gen_op_load_fpr_DT0(DFPREG(rs1));
4057                     gen_op_load_fpr_DT1(DFPREG(rs2));
4058                     gen_helper_fpsub32();
4059                     gen_op_store_DT0_fpr(DFPREG(rd));
4060                     break;
4061                 case 0x057: /* VIS I fpsub32s */
4062                     CHECK_FPU_FEATURE(dc, VIS1);
4063                     gen_helper_fpsub32s(cpu_fpr[rd],
4064                                         cpu_fpr[rs1], cpu_fpr[rs2]);
4065                     break;
4066                 case 0x060: /* VIS I fzero */
4067                     CHECK_FPU_FEATURE(dc, VIS1);
4068                     tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
4069                     tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
4070                     break;
4071                 case 0x061: /* VIS I fzeros */
4072                     CHECK_FPU_FEATURE(dc, VIS1);
4073                     tcg_gen_movi_i32(cpu_fpr[rd], 0);
4074                     break;
4075                 case 0x062: /* VIS I fnor */
4076                     CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
4081                     break;
4082                 case 0x063: /* VIS I fnors */
4083                     CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4085                     break;
4086                 case 0x064: /* VIS I fandnot2 */
4087                     CHECK_FPU_FEATURE(dc, VIS1);
4088                     tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4089                                      cpu_fpr[DFPREG(rs2)]);
4090                     tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4091                                      cpu_fpr[DFPREG(rs1) + 1],
4092                                      cpu_fpr[DFPREG(rs2) + 1]);
4093                     break;
4094                 case 0x065: /* VIS I fandnot2s */
4095                     CHECK_FPU_FEATURE(dc, VIS1);
4096                     tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4097                     break;
4098                 case 0x066: /* VIS I fnot2 */
4099                     CHECK_FPU_FEATURE(dc, VIS1);
4100                     tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4101                     tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4102                                     cpu_fpr[DFPREG(rs2) + 1]);
4103                     break;
4104                 case 0x067: /* VIS I fnot2s */
4105                     CHECK_FPU_FEATURE(dc, VIS1);
4106                     tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4107                     break;
4108                 case 0x068: /* VIS I fandnot1 */
4109                     CHECK_FPU_FEATURE(dc, VIS1);
4110                     tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4111                                      cpu_fpr[DFPREG(rs1)]);
4112                     tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4113                                      cpu_fpr[DFPREG(rs2) + 1],
4114                                      cpu_fpr[DFPREG(rs1) + 1]);
4115                     break;
4116                 case 0x069: /* VIS I fandnot1s */
4117                     CHECK_FPU_FEATURE(dc, VIS1);
4118                     tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4119                     break;
4120                 case 0x06a: /* VIS I fnot1 */
4121                     CHECK_FPU_FEATURE(dc, VIS1);
4122                     tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4123                     tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4124                                     cpu_fpr[DFPREG(rs1) + 1]);
4125                     break;
4126                 case 0x06b: /* VIS I fnot1s */
4127                     CHECK_FPU_FEATURE(dc, VIS1);
4128                     tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4129                     break;
4130                 case 0x06c: /* VIS I fxor */
4131                     CHECK_FPU_FEATURE(dc, VIS1);
4132                     tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4133                                     cpu_fpr[DFPREG(rs2)]);
4134                     tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4135                                     cpu_fpr[DFPREG(rs1) + 1],
4136                                     cpu_fpr[DFPREG(rs2) + 1]);
4137                     break;
4138                 case 0x06d: /* VIS I fxors */
4139                     CHECK_FPU_FEATURE(dc, VIS1);
4140                     tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4141                     break;
4142                 case 0x06e: /* VIS I fnand */
4143                     CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                     cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1]);
4148                     break;
4149                 case 0x06f: /* VIS I fnands */
4150                     CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4152                     break;
4153                 case 0x070: /* VIS I fand */
4154                     CHECK_FPU_FEATURE(dc, VIS1);
4155                     tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4156                                     cpu_fpr[DFPREG(rs2)]);
4157                     tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4158                                     cpu_fpr[DFPREG(rs1) + 1],
4159                                     cpu_fpr[DFPREG(rs2) + 1]);
4160                     break;
4161                 case 0x071: /* VIS I fands */
4162                     CHECK_FPU_FEATURE(dc, VIS1);
4163                     tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4164                     break;
4165                 case 0x072: /* VIS I fxnor */
4166                     CHECK_FPU_FEATURE(dc, VIS1);
4167                     tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4168                     tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4169                                     cpu_fpr[DFPREG(rs1)]);
4170                     tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4171                     tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4172                                     cpu_fpr[DFPREG(rs1) + 1]);
4173                     break;
4174                 case 0x073: /* VIS I fxnors */
4175                     CHECK_FPU_FEATURE(dc, VIS1);
4176                     tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4177                     tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4178                     break;
4179                 case 0x074: /* VIS I fsrc1 */
4180                     CHECK_FPU_FEATURE(dc, VIS1);
4181                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4182                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4183                                     cpu_fpr[DFPREG(rs1) + 1]);
4184                     break;
4185                 case 0x075: /* VIS I fsrc1s */
4186                     CHECK_FPU_FEATURE(dc, VIS1);
4187                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4188                     break;
4189                 case 0x076: /* VIS I fornot2 */
4190                     CHECK_FPU_FEATURE(dc, VIS1);
4191                     tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4192                                     cpu_fpr[DFPREG(rs2)]);
4193                     tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4194                                     cpu_fpr[DFPREG(rs1) + 1],
4195                                     cpu_fpr[DFPREG(rs2) + 1]);
4196                     break;
4197                 case 0x077: /* VIS I fornot2s */
4198                     CHECK_FPU_FEATURE(dc, VIS1);
4199                     tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4200                     break;
4201                 case 0x078: /* VIS I fsrc2 */
4202                     CHECK_FPU_FEATURE(dc, VIS1);
4203                     gen_op_load_fpr_DT0(DFPREG(rs2));
4204                     gen_op_store_DT0_fpr(DFPREG(rd));
4205                     break;
4206                 case 0x079: /* VIS I fsrc2s */
4207                     CHECK_FPU_FEATURE(dc, VIS1);
4208                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4209                     break;
4210                 case 0x07a: /* VIS I fornot1 */
4211                     CHECK_FPU_FEATURE(dc, VIS1);
4212                     tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4213                                     cpu_fpr[DFPREG(rs1)]);
4214                     tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4215                                     cpu_fpr[DFPREG(rs2) + 1],
4216                                     cpu_fpr[DFPREG(rs1) + 1]);
4217                     break;
4218                 case 0x07b: /* VIS I fornot1s */
4219                     CHECK_FPU_FEATURE(dc, VIS1);
4220                     tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4221                     break;
4222                 case 0x07c: /* VIS I for */
4223                     CHECK_FPU_FEATURE(dc, VIS1);
4224                     tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4225                                    cpu_fpr[DFPREG(rs2)]);
4226                     tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4227                                    cpu_fpr[DFPREG(rs1) + 1],
4228                                    cpu_fpr[DFPREG(rs2) + 1]);
4229                     break;
4230                 case 0x07d: /* VIS I fors */
4231                     CHECK_FPU_FEATURE(dc, VIS1);
4232                     tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4233                     break;
4234                 case 0x07e: /* VIS I fone */
4235                     CHECK_FPU_FEATURE(dc, VIS1);
4236                     tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4237                     tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4238                     break;
4239                 case 0x07f: /* VIS I fones */
4240                     CHECK_FPU_FEATURE(dc, VIS1);
4241                     tcg_gen_movi_i32(cpu_fpr[rd], -1);
4242                     break;
4243                 case 0x080: /* VIS I shutdown */
4244                 case 0x081: /* VIS II siam */
                    // XXX: shutdown/siam not implemented yet
4246                     goto illegal_insn;
4247                 default:
4248                     goto illegal_insn;
4249                 }
4250 #else
4251                 goto ncp_insn;
4252 #endif
4253             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4254 #ifdef TARGET_SPARC64
4255                 goto illegal_insn;
4256 #else
4257                 goto ncp_insn;
4258 #endif
4259 #ifdef TARGET_SPARC64
4260             } else if (xop == 0x39) { /* V9 return */
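                /* V9 return: compute the target from rs1 + (simm13 or rs2),
                   restore the caller's register window, then transfer control
                   there; architecturally this is a RESTORE combined with a
                   register-indirect branch. */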
4261                 TCGv_i32 r_const;
4262
4263                 save_state(dc, cpu_cond);
4264                 cpu_src1 = get_src1(insn, cpu_src1);
4265                 if (IS_IMM) {   /* immediate */
4266                     simm = GET_FIELDs(insn, 19, 31);
4267                     tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4268                 } else {                /* register */
4269                     rs2 = GET_FIELD(insn, 27, 31);
4270                     if (rs2) {
4271                         gen_movl_reg_TN(rs2, cpu_src2);
4272                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4273                     } else
4274                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
4275                 }
4276                 gen_helper_restore();
4277                 gen_mov_pc_npc(dc, cpu_cond);
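                /* Branch targets must be word aligned: check_align raises an
                   unaligned-address trap if any bit covered by the mask (3)
                   is set. */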
4278                 r_const = tcg_const_i32(3);
4279                 gen_helper_check_align(cpu_dst, r_const);
4280                 tcg_temp_free_i32(r_const);
4281                 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4282                 dc->npc = DYNAMIC_PC;
4283                 goto jmp_insn;
4284 #endif
4285             } else {
4286                 cpu_src1 = get_src1(insn, cpu_src1);
4287                 if (IS_IMM) {   /* immediate */
4288                     simm = GET_FIELDs(insn, 19, 31);
4289                     tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4290                 } else {                /* register */
4291                     rs2 = GET_FIELD(insn, 27, 31);
4292                     if (rs2) {
4293                         gen_movl_reg_TN(rs2, cpu_src2);
4294                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4295                     } else
4296                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
4297                 }
4298                 switch (xop) {
4299                 case 0x38:      /* jmpl */
4300                     {
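                        /* jmpl: rd receives the address of the jmpl itself
                           (rd = 15 gives the usual call linkage), then control
                           transfers to the computed target after the delay
                           slot. */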
4301                         TCGv r_pc;
4302                         TCGv_i32 r_const;
4303
4304                         r_pc = tcg_const_tl(dc->pc);
4305                         gen_movl_TN_reg(rd, r_pc);
4306                         tcg_temp_free(r_pc);
4307                         gen_mov_pc_npc(dc, cpu_cond);
4308                         r_const = tcg_const_i32(3);
4309                         gen_helper_check_align(cpu_dst, r_const);
4310                         tcg_temp_free_i32(r_const);
4311                         tcg_gen_mov_tl(cpu_npc, cpu_dst);
4312                         dc->npc = DYNAMIC_PC;
4313                     }
4314                     goto jmp_insn;
4315 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4316                 case 0x39:      /* rett, V9 return */
4317                     {
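                        /* rett is the privileged V8 return from trap: the
                           helper re-enables traps and restores the previous
                           supervisor state and register window, and the
                           computed target becomes the new npc. */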
4318                         TCGv_i32 r_const;
4319
4320                         if (!supervisor(dc))
4321                             goto priv_insn;
4322                         gen_mov_pc_npc(dc, cpu_cond);
4323                         r_const = tcg_const_i32(3);
4324                         gen_helper_check_align(cpu_dst, r_const);
4325                         tcg_temp_free_i32(r_const);
4326                         tcg_gen_mov_tl(cpu_npc, cpu_dst);
4327                         dc->npc = DYNAMIC_PC;
4328                         gen_helper_rett();
4329                     }
4330                     goto jmp_insn;
4331 #endif
4332                 case 0x3b: /* flush */
4333                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4334                         goto unimp_flush;
4335                     gen_helper_flush(cpu_dst);
4336                     break;
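                /* save and restore rotate the register window; the helpers can
                   raise window overflow/underflow (or spill/fill) traps, which
                   is why save_state() is called first. */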
4337                 case 0x3c:      /* save */
4338                     save_state(dc, cpu_cond);
4339                     gen_helper_save();
4340                     gen_movl_TN_reg(rd, cpu_dst);
4341                     break;
4342                 case 0x3d:      /* restore */
4343                     save_state(dc, cpu_cond);
4344                     gen_helper_restore();
4345                     gen_movl_TN_reg(rd, cpu_dst);
4346                     break;
4347 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4348                 case 0x3e:      /* V9 done/retry */
4349                     {
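                        /* done (rd == 0) resumes at the saved TNPC, retry
                           (rd == 1) re-executes the trapped instruction at
                           TPC; both restore state from the trap registers and
                           are privileged. */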
4350                         switch (rd) {
4351                         case 0:
4352                             if (!supervisor(dc))
4353                                 goto priv_insn;
4354                             dc->npc = DYNAMIC_PC;
4355                             dc->pc = DYNAMIC_PC;
4356                             gen_helper_done();
4357                             goto jmp_insn;
4358                         case 1:
4359                             if (!supervisor(dc))
4360                                 goto priv_insn;
4361                             dc->npc = DYNAMIC_PC;
4362                             dc->pc = DYNAMIC_PC;
4363                             gen_helper_retry();
4364                             goto jmp_insn;
4365                         default:
4366                             goto illegal_insn;
4367                         }
4368                     }
4369                     break;
4370 #endif
4371                 default:
4372                     goto illegal_insn;
4373                 }
4374             }
4375             break;
4376         }
4377         break;
4378     case 3:                     /* load/store instructions */
4379         {
4380             unsigned int xop = GET_FIELD(insn, 7, 12);
4381
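            /* Form the effective address: rs1 + simm13 for the immediate
               form, rs1 + rs2 otherwise. The V9 casa/casxa forms take the
               address from rs1 alone and use rs2 as the comparison value.
               gen_address_mask() later truncates the address to 32 bits when
               V9 PSTATE.AM is set. */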
4382             cpu_src1 = get_src1(insn, cpu_src1);
4383             if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4384                 rs2 = GET_FIELD(insn, 27, 31);
4385                 gen_movl_reg_TN(rs2, cpu_src2);
4386                 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4387             } else if (IS_IMM) {     /* immediate */
4388                 simm = GET_FIELDs(insn, 19, 31);
4389                 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4390             } else {            /* register */
4391                 rs2 = GET_FIELD(insn, 27, 31);
4392                 if (rs2 != 0) {
4393                     gen_movl_reg_TN(rs2, cpu_src2);
4394                     tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4395                 } else
4396                     tcg_gen_mov_tl(cpu_addr, cpu_src1);
4397             }
4398             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4399                 (xop > 0x17 && xop <= 0x1d ) ||
4400                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4401                 switch (xop) {
4402                 case 0x0:       /* ld, V9 lduw, load unsigned word */
4403                     gen_address_mask(dc, cpu_addr);
4404                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4405                     break;
4406                 case 0x1:       /* ldub, load unsigned byte */
4407                     gen_address_mask(dc, cpu_addr);
4408                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4409                     break;
4410                 case 0x2:       /* lduh, load unsigned halfword */
4411                     gen_address_mask(dc, cpu_addr);
4412                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4413                     break;
4414                 case 0x3:       /* ldd, load double word */
4415                     if (rd & 1)
4416                         goto illegal_insn;
4417                     else {
4418                         TCGv_i32 r_const;
4419
4420                         save_state(dc, cpu_cond);
4421                         r_const = tcg_const_i32(7);
4422                         gen_helper_check_align(cpu_addr, r_const); // XXX remove
4423                         tcg_temp_free_i32(r_const);
4424                         gen_address_mask(dc, cpu_addr);
4425                         tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
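                        /* ldd: the low 32 bits of the doubleword go to the odd
                           register rd + 1 here; the high 32 bits are left in
                           cpu_val and written to the even register rd by the
                           common code after this switch. */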
4426                         tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4427                         tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4428                         gen_movl_TN_reg(rd + 1, cpu_tmp0);
4429                         tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4430                         tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4431                         tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4432                     }
4433                     break;
4434                 case 0x9:       /* ldsb, load signed byte */
4435                     gen_address_mask(dc, cpu_addr);
4436                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4437                     break;
4438                 case 0xa:       /* ldsh, load signed halfword */
4439                     gen_address_mask(dc, cpu_addr);
4440                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4441                     break;
                case 0xd:       /* ldstub -- XXX: should be atomic */
4443                     {
4444                         TCGv r_const;
4445
4446                         gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4448                         r_const = tcg_const_tl(0xff);
4449                         tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4450                         tcg_temp_free(r_const);
4451                     }
4452                     break;
                case 0x0f:      /* swap, swap register with memory.
                                   XXX: should be atomic */
4455                     CHECK_IU_FEATURE(dc, SWAP);
4456                     gen_movl_reg_TN(rd, cpu_val);
4457                     gen_address_mask(dc, cpu_addr);
4458                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4459                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4460                     tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4461                     break;
4462 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4463                 case 0x10:      /* lda, V9 lduwa, load word alternate */
4464 #ifndef TARGET_SPARC64
4465                     if (IS_IMM)
4466                         goto illegal_insn;
4467                     if (!supervisor(dc))
4468                         goto priv_insn;
4469 #endif
4470                     save_state(dc, cpu_cond);
4471                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4472                     break;
4473                 case 0x11:      /* lduba, load unsigned byte alternate */
4474 #ifndef TARGET_SPARC64
4475                     if (IS_IMM)
4476                         goto illegal_insn;
4477                     if (!supervisor(dc))
4478                         goto priv_insn;
4479 #endif
4480                     save_state(dc, cpu_cond);
4481                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4482                     break;
4483                 case 0x12:      /* lduha, load unsigned halfword alternate */
4484 #ifndef TARGET_SPARC64
4485                     if (IS_IMM)
4486                         goto illegal_insn;
4487                     if (!supervisor(dc))
4488                         goto priv_insn;
4489 #endif
4490                     save_state(dc, cpu_cond);
4491                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4492                     break;
4493                 case 0x13:      /* ldda, load double word alternate */
4494 #ifndef TARGET_SPARC64
4495                     if (IS_IMM)
4496                         goto illegal_insn;
4497                     if (!supervisor(dc))
4498                         goto priv_insn;
4499 #endif
4500                     if (rd & 1)
4501                         goto illegal_insn;
4502                     save_state(dc, cpu_cond);
4503                     gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4504                     goto skip_move;
4505                 case 0x19:      /* ldsba, load signed byte alternate */
4506 #ifndef TARGET_SPARC64
4507                     if (IS_IMM)
4508                         goto illegal_insn;
4509                     if (!supervisor(dc))
4510                         goto priv_insn;
4511 #endif
4512                     save_state(dc, cpu_cond);
4513                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4514                     break;
4515                 case 0x1a:      /* ldsha, load signed halfword alternate */
4516 #ifndef TARGET_SPARC64
4517                     if (IS_IMM)
4518                         goto illegal_insn;
4519                     if (!supervisor(dc))
4520                         goto priv_insn;
4521 #endif
4522                     save_state(dc, cpu_cond);
4523                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4524                     break;
                case 0x1d:      /* ldstuba -- XXX: should be atomic */
4526 #ifndef TARGET_SPARC64
4527                     if (IS_IMM)
4528                         goto illegal_insn;
4529                     if (!supervisor(dc))
4530                         goto priv_insn;
4531 #endif
4532                     save_state(dc, cpu_cond);
4533                     gen_ldstub_asi(cpu_val, cpu_addr, insn);
4534                     break;
                case 0x1f:      /* swapa, swap reg with alternate memory.
                                   XXX: should be atomic */
4537                     CHECK_IU_FEATURE(dc, SWAP);
4538 #ifndef TARGET_SPARC64
4539                     if (IS_IMM)
4540                         goto illegal_insn;
4541                     if (!supervisor(dc))
4542                         goto priv_insn;
4543 #endif
4544                     save_state(dc, cpu_cond);
4545                     gen_movl_reg_TN(rd, cpu_val);
4546                     gen_swap_asi(cpu_val, cpu_addr, insn);
4547                     break;
4548
4549 #ifndef TARGET_SPARC64
4550                 case 0x30: /* ldc */
4551                 case 0x31: /* ldcsr */
4552                 case 0x33: /* lddc */
4553                     goto ncp_insn;
4554 #endif
4555 #endif
4556 #ifdef TARGET_SPARC64
4557                 case 0x08: /* V9 ldsw */
4558                     gen_address_mask(dc, cpu_addr);
4559                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4560                     break;
4561                 case 0x0b: /* V9 ldx */
4562                     gen_address_mask(dc, cpu_addr);
4563                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4564                     break;
4565                 case 0x18: /* V9 ldswa */
4566                     save_state(dc, cpu_cond);
4567                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4568                     break;
4569                 case 0x1b: /* V9 ldxa */
4570                     save_state(dc, cpu_cond);
4571                     gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4572                     break;
4573                 case 0x2d: /* V9 prefetch, no effect */
4574                     goto skip_move;
4575                 case 0x30: /* V9 ldfa */
4576                     save_state(dc, cpu_cond);
4577                     gen_ldf_asi(cpu_addr, insn, 4, rd);
4578                     goto skip_move;
4579                 case 0x33: /* V9 lddfa */
4580                     save_state(dc, cpu_cond);
4581                     gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4582                     goto skip_move;
4583                 case 0x3d: /* V9 prefetcha, no effect */
4584                     goto skip_move;
4585                 case 0x32: /* V9 ldqfa */
4586                     CHECK_FPU_FEATURE(dc, FLOAT128);
4587                     save_state(dc, cpu_cond);
4588                     gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4589                     goto skip_move;
4590 #endif
4591                 default:
4592                     goto illegal_insn;
4593                 }
4594                 gen_movl_TN_reg(rd, cpu_val);
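                /* Loads whose helpers write the destination registers
                   themselves (ldda, the ASI FP loads, prefetches) branch to
                   the label below and skip the writeback above. */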
4595 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4596             skip_move: ;
4597 #endif
4598             } else if (xop >= 0x20 && xop < 0x24) {
4599                 if (gen_trap_ifnofpu(dc, cpu_cond))
4600                     goto jmp_insn;
4601                 save_state(dc, cpu_cond);
4602                 switch (xop) {
4603                 case 0x20:      /* ldf, load fpreg */
4604                     gen_address_mask(dc, cpu_addr);
4605                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4606                     tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4607                     break;
4608                 case 0x21:      /* ldfsr, V9 ldxfsr */
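                    /* rd == 1 encodes the V9 ldxfsr form, which loads the full
                       64-bit FSR; plain ldfsr (rd == 0) only replaces the low
                       32 bits. */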
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_tmp64);
                    } else {
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#else
                    {
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#endif
                    break;
4622                 case 0x22:      /* ldqf, load quad fpreg */
4623                     {
4624                         TCGv_i32 r_const;
4625
4626                         CHECK_FPU_FEATURE(dc, FLOAT128);
4627                         r_const = tcg_const_i32(dc->mem_idx);
4628                         gen_helper_ldqf(cpu_addr, r_const);
4629                         tcg_temp_free_i32(r_const);
4630                         gen_op_store_QT0_fpr(QFPREG(rd));
4631                     }
4632                     break;
4633                 case 0x23:      /* lddf, load double fpreg */
4634                     {
4635                         TCGv_i32 r_const;
4636
4637                         r_const = tcg_const_i32(dc->mem_idx);
4638                         gen_helper_lddf(cpu_addr, r_const);
4639                         tcg_temp_free_i32(r_const);
4640                         gen_op_store_DT0_fpr(DFPREG(rd));
4641                     }
4642                     break;
4643                 default:
4644                     goto illegal_insn;
4645                 }
4646             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4647                        xop == 0xe || xop == 0x1e) {
4648                 gen_movl_reg_TN(rd, cpu_val);
4649                 switch (xop) {
4650                 case 0x4: /* st, store word */
4651                     gen_address_mask(dc, cpu_addr);
4652                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4653                     break;
4654                 case 0x5: /* stb, store byte */
4655                     gen_address_mask(dc, cpu_addr);
4656                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4657                     break;
4658                 case 0x6: /* sth, store halfword */
4659                     gen_address_mask(dc, cpu_addr);
4660                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4661                     break;
4662                 case 0x7: /* std, store double word */
4663                     if (rd & 1)
4664                         goto illegal_insn;
4665                     else {
4666                         TCGv_i32 r_const;
4667
4668                         save_state(dc, cpu_cond);
4669                         gen_address_mask(dc, cpu_addr);
4670                         r_const = tcg_const_i32(7);
4671                         gen_helper_check_align(cpu_addr, r_const); // XXX remove
4672                         tcg_temp_free_i32(r_const);
4673                         gen_movl_reg_TN(rd + 1, cpu_tmp0);
4674                         tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4675                         tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4676                     }
4677                     break;
4678 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4679                 case 0x14: /* sta, V9 stwa, store word alternate */
4680 #ifndef TARGET_SPARC64
4681                     if (IS_IMM)
4682                         goto illegal_insn;
4683                     if (!supervisor(dc))
4684                         goto priv_insn;
4685 #endif
4686                     save_state(dc, cpu_cond);
4687                     gen_st_asi(cpu_val, cpu_addr, insn, 4);
4688                     break;
4689                 case 0x15: /* stba, store byte alternate */
4690 #ifndef TARGET_SPARC64
4691                     if (IS_IMM)
4692                         goto illegal_insn;
4693                     if (!supervisor(dc))
4694                         goto priv_insn;
4695 #endif
4696                     save_state(dc, cpu_cond);
4697                     gen_st_asi(cpu_val, cpu_addr, insn, 1);
4698                     break;
4699                 case 0x16: /* stha, store halfword alternate */
4700 #ifndef TARGET_SPARC64
4701                     if (IS_IMM)
4702                         goto illegal_insn;
4703                     if (!supervisor(dc))
4704                         goto priv_insn;
4705 #endif
4706                     save_state(dc, cpu_cond);
4707                     gen_st_asi(cpu_val, cpu_addr, insn, 2);
4708                     break;
4709                 case 0x17: /* stda, store double word alternate */
4710 #ifndef TARGET_SPARC64
4711                     if (IS_IMM)
4712                         goto illegal_insn;
4713                     if (!supervisor(dc))
4714                         goto priv_insn;
4715 #endif
4716                     if (rd & 1)
4717                         goto illegal_insn;
4718                     else {
4719                         save_state(dc, cpu_cond);
4720                         gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4721                     }
4722                     break;
4723 #endif
4724 #ifdef TARGET_SPARC64
4725                 case 0x0e: /* V9 stx */
4726                     gen_address_mask(dc, cpu_addr);
4727                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4728                     break;
4729                 case 0x1e: /* V9 stxa */
4730                     save_state(dc, cpu_cond);
4731                     gen_st_asi(cpu_val, cpu_addr, insn, 8);
4732                     break;
4733 #endif
4734                 default:
4735                     goto illegal_insn;
4736                 }
4737             } else if (xop > 0x23 && xop < 0x28) {
4738                 if (gen_trap_ifnofpu(dc, cpu_cond))
4739                     goto jmp_insn;
4740                 save_state(dc, cpu_cond);
4741                 switch (xop) {
4742                 case 0x24: /* stf, store fpreg */
4743                     gen_address_mask(dc, cpu_addr);
4744                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4745                     tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4746                     break;
4747                 case 0x25: /* stfsr, V9 stxfsr */
4748 #ifdef TARGET_SPARC64
4749                     gen_address_mask(dc, cpu_addr);
4750                     tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4751                     if (rd == 1)
4752                         tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4753                     else
4754                         tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4755 #else
4756                     tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4757                     tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4758 #endif
4759                     break;
4760                 case 0x26:
4761 #ifdef TARGET_SPARC64
4762                     /* V9 stqf, store quad fpreg */
4763                     {
4764                         TCGv_i32 r_const;
4765
4766                         CHECK_FPU_FEATURE(dc, FLOAT128);
4767                         gen_op_load_fpr_QT0(QFPREG(rd));
4768                         r_const = tcg_const_i32(dc->mem_idx);
4769                         gen_helper_stqf(cpu_addr, r_const);
4770                         tcg_temp_free_i32(r_const);
4771                     }
4772                     break;
4773 #else /* !TARGET_SPARC64 */
4774                     /* stdfq, store floating point queue */
4775 #if defined(CONFIG_USER_ONLY)
4776                     goto illegal_insn;
4777 #else
4778                     if (!supervisor(dc))
4779                         goto priv_insn;
4780                     if (gen_trap_ifnofpu(dc, cpu_cond))
4781                         goto jmp_insn;
4782                     goto nfq_insn;
4783 #endif
4784 #endif
4785                 case 0x27: /* stdf, store double fpreg */
4786                     {
4787                         TCGv_i32 r_const;
4788
4789                         gen_op_load_fpr_DT0(DFPREG(rd));
4790                         r_const = tcg_const_i32(dc->mem_idx);
4791                         gen_helper_stdf(cpu_addr, r_const);
4792                         tcg_temp_free_i32(r_const);
4793                     }
4794                     break;
4795                 default:
4796                     goto illegal_insn;
4797                 }
4798             } else if (xop > 0x33 && xop < 0x3f) {
4799                 save_state(dc, cpu_cond);
4800                 switch (xop) {
4801 #ifdef TARGET_SPARC64
4802                 case 0x34: /* V9 stfa */
4803                     gen_stf_asi(cpu_addr, insn, 4, rd);
4804                     break;
4805                 case 0x36: /* V9 stqfa */
4806                     {
4807                         TCGv_i32 r_const;
4808
4809                         CHECK_FPU_FEATURE(dc, FLOAT128);
4810                         r_const = tcg_const_i32(7);
4811                         gen_helper_check_align(cpu_addr, r_const);
4812                         tcg_temp_free_i32(r_const);
4813                         gen_op_load_fpr_QT0(QFPREG(rd));
4814                         gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4815                     }
4816                     break;
4817                 case 0x37: /* V9 stdfa */
4818                     gen_op_load_fpr_DT0(DFPREG(rd));
4819                     gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4820                     break;
4821                 case 0x3c: /* V9 casa */
4822                     gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4823                     gen_movl_TN_reg(rd, cpu_val);
4824                     break;
4825                 case 0x3e: /* V9 casxa */
4826                     gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4827                     gen_movl_TN_reg(rd, cpu_val);
4828                     break;
4829 #else
4830                 case 0x34: /* stc */
4831                 case 0x35: /* stcsr */
4832                 case 0x36: /* stdcq */
4833                 case 0x37: /* stdc */
4834                     goto ncp_insn;
4835 #endif
4836                 default:
4837                     goto illegal_insn;
4838                 }
4839             } else
4840                 goto illegal_insn;
4841         }
4842         break;
4843     }
    /* default case for non-jump instructions */
4845     if (dc->npc == DYNAMIC_PC) {
4846         dc->pc = DYNAMIC_PC;
4847         gen_op_next_insn();
4848     } else if (dc->npc == JUMP_PC) {
4849         /* we can do a static jump */
4850         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4851         dc->is_br = 1;
4852     } else {
4853         dc->pc = dc->npc;
4854         dc->npc = dc->npc + 4;
4855     }
4856  jmp_insn:
4857     return;
4858  illegal_insn:
4859     {
4860         TCGv_i32 r_const;
4861
4862         save_state(dc, cpu_cond);
4863         r_const = tcg_const_i32(TT_ILL_INSN);
4864         gen_helper_raise_exception(r_const);
4865         tcg_temp_free_i32(r_const);
4866         dc->is_br = 1;
4867     }
4868     return;
4869  unimp_flush:
4870     {
4871         TCGv_i32 r_const;
4872
4873         save_state(dc, cpu_cond);
4874         r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4875         gen_helper_raise_exception(r_const);
4876         tcg_temp_free_i32(r_const);
4877         dc->is_br = 1;
4878     }
4879     return;
4880 #if !defined(CONFIG_USER_ONLY)
4881  priv_insn:
4882     {
4883         TCGv_i32 r_const;
4884
4885         save_state(dc, cpu_cond);
4886         r_const = tcg_const_i32(TT_PRIV_INSN);
4887         gen_helper_raise_exception(r_const);
4888         tcg_temp_free_i32(r_const);
4889         dc->is_br = 1;
4890     }
4891     return;
4892 #endif
4893  nfpu_insn:
4894     save_state(dc, cpu_cond);
4895     gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4896     dc->is_br = 1;
4897     return;
4898 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4899  nfq_insn:
4900     save_state(dc, cpu_cond);
4901     gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4902     dc->is_br = 1;
4903     return;
4904 #endif
4905 #ifndef TARGET_SPARC64
4906  ncp_insn:
4907     {
        TCGv_i32 r_const;
4909
4910         save_state(dc, cpu_cond);
4911         r_const = tcg_const_i32(TT_NCP_INSN);
4912         gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
4914         dc->is_br = 1;
4915     }
4916     return;
4917 #endif
4918 }
4919
4920 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4921                                                   int spc, CPUSPARCState *env)
4922 {
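    /* spc != 0 selects the "search PC" pass: code is re-translated only to
       rebuild the gen_opc_* tables that map a host PC back to a guest pc/npc
       pair (consumed by gen_pc_load below). */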
4923     target_ulong pc_start, last_pc;
4924     uint16_t *gen_opc_end;
4925     DisasContext dc1, *dc = &dc1;
4926     CPUBreakpoint *bp;
4927     int j, lj = -1;
4928     int num_insns;
4929     int max_insns;
4930
4931     memset(dc, 0, sizeof(DisasContext));
4932     dc->tb = tb;
4933     pc_start = tb->pc;
4934     dc->pc = pc_start;
4935     last_pc = dc->pc;
4936     dc->npc = (target_ulong) tb->cs_base;
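    /* Because of delay slots the next PC is part of the CPU state on SPARC,
       so it is passed to the translator in the TB's cs_base field. */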
4937     dc->cc_op = CC_OP_DYNAMIC;
4938     dc->mem_idx = cpu_mmu_index(env);
4939     dc->def = env->def;
4940     if ((dc->def->features & CPU_FEATURE_FLOAT))
4941         dc->fpu_enabled = cpu_fpu_enabled(env);
4942     else
4943         dc->fpu_enabled = 0;
4944 #ifdef TARGET_SPARC64
4945     dc->address_mask_32bit = env->pstate & PS_AM;
4946 #endif
4947     gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4948
4949     cpu_tmp0 = tcg_temp_new();
4950     cpu_tmp32 = tcg_temp_new_i32();
4951     cpu_tmp64 = tcg_temp_new_i64();
4952
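    /* Local temps: these values must survive the conditional branches that
       can be emitted while translating a single instruction. */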
4953     cpu_dst = tcg_temp_local_new();
4954
4955     // loads and stores
4956     cpu_val = tcg_temp_local_new();
4957     cpu_addr = tcg_temp_local_new();
4958
4959     num_insns = 0;
4960     max_insns = tb->cflags & CF_COUNT_MASK;
4961     if (max_insns == 0)
4962         max_insns = CF_COUNT_MASK;
4963     gen_icount_start();
4964     do {
4965         if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
4966             TAILQ_FOREACH(bp, &env->breakpoints, entry) {
4967                 if (bp->pc == dc->pc) {
4968                     if (dc->pc != pc_start)
4969                         save_state(dc, cpu_cond);
4970                     gen_helper_debug();
4971                     tcg_gen_exit_tb(0);
4972                     dc->is_br = 1;
4973                     goto exit_gen_loop;
4974                 }
4975             }
4976         }
4977         if (spc) {
4978             qemu_log("Search PC...\n");
4979             j = gen_opc_ptr - gen_opc_buf;
4980             if (lj < j) {
4981                 lj++;
4982                 while (lj < j)
4983                     gen_opc_instr_start[lj++] = 0;
4984                 gen_opc_pc[lj] = dc->pc;
4985                 gen_opc_npc[lj] = dc->npc;
4986                 gen_opc_instr_start[lj] = 1;
4987                 gen_opc_icount[lj] = num_insns;
4988             }
4989         }
4990         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4991             gen_io_start();
4992         last_pc = dc->pc;
4993         disas_sparc_insn(dc);
4994         num_insns++;
4995
4996         if (dc->is_br)
4997             break;
4998         /* if the next PC is different, we abort now */
4999         if (dc->pc != (last_pc + 4))
5000             break;
5001         /* if we reach a page boundary, we stop generation so that the
5002            PC of a TT_TFAULT exception is always in the right page */
5003         if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5004             break;
5005         /* if single step mode, we generate only one instruction and
5006            generate an exception */
5007         if (env->singlestep_enabled || singlestep) {
5008             tcg_gen_movi_tl(cpu_pc, dc->pc);
5009             tcg_gen_exit_tb(0);
5010             break;
5011         }
5012     } while ((gen_opc_ptr < gen_opc_end) &&
5013              (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5014              num_insns < max_insns);
5015
5016  exit_gen_loop:
5017     tcg_temp_free(cpu_addr);
5018     tcg_temp_free(cpu_val);
5019     tcg_temp_free(cpu_dst);
5020     tcg_temp_free_i64(cpu_tmp64);
5021     tcg_temp_free_i32(cpu_tmp32);
5022     tcg_temp_free(cpu_tmp0);
5023     if (tb->cflags & CF_LAST_IO)
5024         gen_io_end();
5025     if (!dc->is_br) {
5026         if (dc->pc != DYNAMIC_PC &&
5027             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5028             /* static PC and NPC: we can use direct chaining */
5029             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5030         } else {
5031             if (dc->pc != DYNAMIC_PC)
5032                 tcg_gen_movi_tl(cpu_pc, dc->pc);
5033             save_npc(dc, cpu_cond);
5034             tcg_gen_exit_tb(0);
5035         }
5036     }
5037     gen_icount_end(tb, num_insns);
5038     *gen_opc_ptr = INDEX_op_end;
5039     if (spc) {
5040         j = gen_opc_ptr - gen_opc_buf;
5041         lj++;
5042         while (lj <= j)
5043             gen_opc_instr_start[lj++] = 0;
5044 #if 0
5045         log_page_dump();
5046 #endif
5047         gen_opc_jump_pc[0] = dc->jump_pc[0];
5048         gen_opc_jump_pc[1] = dc->jump_pc[1];
5049     } else {
5050         tb->size = last_pc + 4 - pc_start;
5051         tb->icount = num_insns;
5052     }
5053 #ifdef DEBUG_DISAS
5054     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5055         qemu_log("--------------\n");
5056         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5057         log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
5058         qemu_log("\n");
5059     }
5060 #endif
5061 }
5062
5063 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5064 {
5065     gen_intermediate_code_internal(tb, 0, env);
5066 }
5067
5068 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5069 {
5070     gen_intermediate_code_internal(tb, 1, env);
5071 }
5072
5073 void gen_intermediate_code_init(CPUSPARCState *env)
5074 {
5075     unsigned int i;
5076     static int inited;
5077     static const char * const gregnames[8] = {
5078         NULL, // g0 not used
5079         "g1",
5080         "g2",
5081         "g3",
5082         "g4",
5083         "g5",
5084         "g6",
5085         "g7",
5086     };
5087     static const char * const fregnames[64] = {
5088         "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5089         "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5090         "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5091         "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5092         "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5093         "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5094         "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5095         "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5096     };
5097
5098     /* init various static tables */
5099     if (!inited) {
5100         inited = 1;
5101
5102         cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5103         cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5104                                              offsetof(CPUState, regwptr),
5105                                              "regwptr");
5106 #ifdef TARGET_SPARC64
5107         cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
5108                                          "xcc");
5109         cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5110                                          "asi");
5111         cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5112                                           "fprs");
5113         cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5114                                      "gsr");
5115         cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5116                                            offsetof(CPUState, tick_cmpr),
5117                                            "tick_cmpr");
5118         cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5119                                             offsetof(CPUState, stick_cmpr),
5120                                             "stick_cmpr");
5121         cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5122                                              offsetof(CPUState, hstick_cmpr),
5123                                              "hstick_cmpr");
5124         cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5125                                        "hintp");
5126         cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5127                                       "htba");
5128         cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5129                                       "hver");
5130         cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5131                                      offsetof(CPUState, ssr), "ssr");
5132         cpu_ver = tcg_global_mem_new(TCG_AREG0,
5133                                      offsetof(CPUState, version), "ver");
5134         cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5135                                              offsetof(CPUState, softint),
5136                                              "softint");
5137 #else
5138         cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
5139                                      "wim");
5140 #endif
5141         cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5142                                       "cond");
5143         cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5144                                         "cc_src");
5145         cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5146                                          offsetof(CPUState, cc_src2),
5147                                          "cc_src2");
5148         cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5149                                         "cc_dst");
5150         cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5151                                            "cc_op");
5152         cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5153                                          "psr");
5154         cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5155                                      "fsr");
5156         cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5157                                     "pc");
5158         cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5159                                      "npc");
5160         cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5161 #ifndef CONFIG_USER_ONLY
5162         cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5163                                      "tbr");
5164 #endif
5165         for (i = 1; i < 8; i++)
5166             cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5167                                               offsetof(CPUState, gregs[i]),
5168                                               gregnames[i]);
5169         for (i = 0; i < TARGET_FPREGS; i++)
5170             cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5171                                                 offsetof(CPUState, fpr[i]),
5172                                                 fregnames[i]);
5173
5174         /* register helpers */
5175
5176 #define GEN_HELPER 2
5177 #include "helper.h"
5178     }
5179 }
5180
5181 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5182                 unsigned long searched_pc, int pc_pos, void *puc)
5183 {
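    /* Called when an exception occurs inside generated code: recover the
       guest pc/npc of the faulting instruction from the tables built during
       the search-PC translation pass. */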
5184     target_ulong npc;
5185     env->pc = gen_opc_pc[pc_pos];
5186     npc = gen_opc_npc[pc_pos];
    if (npc == DYNAMIC_PC) {
5188         /* dynamic NPC: already stored */
    } else if (npc == JUMP_PC) {
5190         target_ulong t2 = (target_ulong)(unsigned long)puc;
5191         /* jump PC: use T2 and the jump targets of the translation */
5192         if (t2)
5193             env->npc = gen_opc_jump_pc[0];
5194         else
5195             env->npc = gen_opc_jump_pc[1];
5196     } else {
5197         env->npc = npc;
5198     }
5199 }