1 /*
2    SPARC translation
3
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, write to the Free Software
19    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA  02110-1301 USA
20  */
21
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
27
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
33
34 #define GEN_HELPER 1
35 #include "helper.h"
36
37 #define DEBUG_DISAS
38
39 #define DYNAMIC_PC  1 /* dynamic pc value */
40 #define JUMP_PC     2 /* dynamic pc value restricted to the two candidates
41                          in jump_pc[], selected by the branch condition */
42
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst, cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
68
69 #include "gen-icount.h"
70
71 typedef struct DisasContext {
72     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
73     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
75     int is_br;
76     int mem_idx;
77     int fpu_enabled;
78     int address_mask_32bit;
79     uint32_t cc_op;  /* current CC operation */
80     struct TranslationBlock *tb;
81     sparc_def_t *def;
82 } DisasContext;
83
84 // This macro uses non-native bit order (bit 0 is the most significant bit)
85 #define GET_FIELD(X, FROM, TO)                                  \
86     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
87
88 // This macro uses the bit order of the SPARC manuals, i.e. bit 0 is 2^0 (the LSB)
89 #define GET_FIELD_SP(X, FROM, TO)               \
90     GET_FIELD(X, 31 - (TO), 31 - (FROM))
91
92 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
93 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
94
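/* Illustrative sketch (added commentary, not part of the original translator):
   the two macro families select the same bits, just with opposite bit
   numbering.  The rd field of an instruction word occupies bits 29..25 in the
   manual's numbering, so GET_FIELD_SP(insn, 25, 29) expands to
   GET_FIELD(insn, 2, 6), i.e. (insn >> 25) & 0x1f. */
#if 0
static unsigned int example_extract_rd(uint32_t insn)
{
    return GET_FIELD_SP(insn, 25, 29);  /* same as GET_FIELD(insn, 2, 6) */
}
#endif
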
95 #ifdef TARGET_SPARC64
96 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
97 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
98 #else
99 #define DFPREG(r) (r & 0x1e)
100 #define QFPREG(r) (r & 0x1c)
101 #endif
102
103 #define UA2005_HTRAP_MASK 0xff
104 #define V8_TRAP_MASK 0x7f
105
106 static int sign_extend(int x, int len)
107 {
108     len = 32 - len;
109     return (x << len) >> len;
110 }
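/* Added note: for example, sign_extend(0x3ff, 10) yields -1 while
   sign_extend(0x1ff, 10) yields 0x1ff, since bit (len - 1) is treated as the
   sign bit of the field. */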
111
112 #define IS_IMM (insn & (1<<13))
113
114 /* floating point register moves */
115 static void gen_op_load_fpr_DT0(unsigned int src)
116 {
117     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
118                    offsetof(CPU_DoubleU, l.upper));
119     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
120                    offsetof(CPU_DoubleU, l.lower));
121 }
122
123 static void gen_op_load_fpr_DT1(unsigned int src)
124 {
125     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
126                    offsetof(CPU_DoubleU, l.upper));
127     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
128                    offsetof(CPU_DoubleU, l.lower));
129 }
130
131 static void gen_op_store_DT0_fpr(unsigned int dst)
132 {
133     tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
134                    offsetof(CPU_DoubleU, l.upper));
135     tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
136                    offsetof(CPU_DoubleU, l.lower));
137 }
138
139 static void gen_op_load_fpr_QT0(unsigned int src)
140 {
141     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
142                    offsetof(CPU_QuadU, l.upmost));
143     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
144                    offsetof(CPU_QuadU, l.upper));
145     tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
146                    offsetof(CPU_QuadU, l.lower));
147     tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
148                    offsetof(CPU_QuadU, l.lowest));
149 }
150
151 static void gen_op_load_fpr_QT1(unsigned int src)
152 {
153     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
154                    offsetof(CPU_QuadU, l.upmost));
155     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
156                    offsetof(CPU_QuadU, l.upper));
157     tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
158                    offsetof(CPU_QuadU, l.lower));
159     tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
160                    offsetof(CPU_QuadU, l.lowest));
161 }
162
163 static void gen_op_store_QT0_fpr(unsigned int dst)
164 {
165     tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
166                    offsetof(CPU_QuadU, l.upmost));
167     tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
168                    offsetof(CPU_QuadU, l.upper));
169     tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
170                    offsetof(CPU_QuadU, l.lower));
171     tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
172                    offsetof(CPU_QuadU, l.lowest));
173 }
174
175 /* moves */
176 #ifdef CONFIG_USER_ONLY
177 #define supervisor(dc) 0
178 #ifdef TARGET_SPARC64
179 #define hypervisor(dc) 0
180 #endif
181 #else
182 #define supervisor(dc) (dc->mem_idx >= 1)
183 #ifdef TARGET_SPARC64
184 #define hypervisor(dc) (dc->mem_idx == 2)
185 #else
186 #endif
187 #endif
188
189 #ifdef TARGET_SPARC64
190 #ifndef TARGET_ABI32
191 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
192 #else
193 #define AM_CHECK(dc) (1)
194 #endif
195 #endif
196
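/* Added note: when 32-bit addressing is in effect (dc->address_mask_32bit on
   a 64-bit CPU, or always under TARGET_ABI32), gen_address_mask() truncates
   generated guest addresses to their low 32 bits before they are used. */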
197 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
198 {
199 #ifdef TARGET_SPARC64
200     if (AM_CHECK(dc))
201         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
202 #endif
203 }
204
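/* Added note: %g0 is hardwired to zero (reads return 0, writes are dropped),
   %g1..%g7 live in dedicated TCG globals (cpu_gregs), and registers 8..31
   (%o/%l/%i) are accessed indirectly through cpu_regwptr, which presumably
   tracks the current register window. */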
205 static inline void gen_movl_reg_TN(int reg, TCGv tn)
206 {
207     if (reg == 0)
208         tcg_gen_movi_tl(tn, 0);
209     else if (reg < 8)
210         tcg_gen_mov_tl(tn, cpu_gregs[reg]);
211     else {
212         tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
213     }
214 }
215
216 static inline void gen_movl_TN_reg(int reg, TCGv tn)
217 {
218     if (reg == 0)
219         return;
220     else if (reg < 8)
221         tcg_gen_mov_tl(cpu_gregs[reg], tn);
222     else {
223         tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
224     }
225 }
226
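/* Added note: gen_goto_tb() chains directly to the next translation block
   only when both the new pc and npc stay on the same guest page as the
   current TB; otherwise it just updates pc/npc and exits to the main loop
   (tcg_gen_exit_tb(0)) so a fresh TB lookup is performed. */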
227 static inline void gen_goto_tb(DisasContext *s, int tb_num,
228                                target_ulong pc, target_ulong npc)
229 {
230     TranslationBlock *tb;
231
232     tb = s->tb;
233     if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
234         (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK))  {
235         /* jump to same page: we can use a direct jump */
236         tcg_gen_goto_tb(tb_num);
237         tcg_gen_movi_tl(cpu_pc, pc);
238         tcg_gen_movi_tl(cpu_npc, npc);
239         tcg_gen_exit_tb((long)tb + tb_num);
240     } else {
241         /* jump to another page: currently not optimized */
242         tcg_gen_movi_tl(cpu_pc, pc);
243         tcg_gen_movi_tl(cpu_npc, npc);
244         tcg_gen_exit_tb(0);
245     }
246 }
247
248 // XXX suboptimal
249 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
250 {
251     tcg_gen_extu_i32_tl(reg, src);
252     tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
253     tcg_gen_andi_tl(reg, reg, 0x1);
254 }
255
256 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
257 {
258     tcg_gen_extu_i32_tl(reg, src);
259     tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
260     tcg_gen_andi_tl(reg, reg, 0x1);
261 }
262
263 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
264 {
265     tcg_gen_extu_i32_tl(reg, src);
266     tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
267     tcg_gen_andi_tl(reg, reg, 0x1);
268 }
269
270 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
271 {
272     tcg_gen_extu_i32_tl(reg, src);
273     tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
274     tcg_gen_andi_tl(reg, reg, 0x1);
275 }
276
277 static inline void gen_cc_clear_icc(void)
278 {
279     tcg_gen_movi_i32(cpu_psr, 0);
280 }
281
282 #ifdef TARGET_SPARC64
283 static inline void gen_cc_clear_xcc(void)
284 {
285     tcg_gen_movi_i32(cpu_xcc, 0);
286 }
287 #endif
288
289 /* old op:
290     if (!T0)
291         env->psr |= PSR_ZERO;
292     if ((int32_t) T0 < 0)
293         env->psr |= PSR_NEG;
294 */
295 static inline void gen_cc_NZ_icc(TCGv dst)
296 {
297     TCGv r_temp;
298     int l1, l2;
299
300     l1 = gen_new_label();
301     l2 = gen_new_label();
302     r_temp = tcg_temp_new();
303     tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
304     tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
305     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
306     gen_set_label(l1);
307     tcg_gen_ext32s_tl(r_temp, dst);
308     tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
309     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
310     gen_set_label(l2);
311     tcg_temp_free(r_temp);
312 }
313
314 #ifdef TARGET_SPARC64
315 static inline void gen_cc_NZ_xcc(TCGv dst)
316 {
317     int l1, l2;
318
319     l1 = gen_new_label();
320     l2 = gen_new_label();
321     tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
322     tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
323     gen_set_label(l1);
324     tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
325     tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
326     gen_set_label(l2);
327 }
328 #endif
329
330 /* old op:
331     if (T0 < src1)
332         env->psr |= PSR_CARRY;
333 */
334 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
335 {
336     TCGv r_temp1, r_temp2;
337     int l1;
338
339     l1 = gen_new_label();
340     r_temp1 = tcg_temp_new();
341     r_temp2 = tcg_temp_new();
342     tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
343     tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
344     tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
345     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
346     gen_set_label(l1);
347     tcg_temp_free(r_temp1);
348     tcg_temp_free(r_temp2);
349 }
350
351 #ifdef TARGET_SPARC64
352 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
353 {
354     int l1;
355
356     l1 = gen_new_label();
357     tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
358     tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
359     gen_set_label(l1);
360 }
361 #endif
362
363 /* old op:
364     if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
365         env->psr |= PSR_OVF;
366 */
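/* Added note: the masked expression is non-zero exactly when src1 and src2
   have the same sign (src1 ^ src2 ^ -1 has bit 31 set) while the sign of the
   result differs from src1 (src1 ^ dst has bit 31 set), which is precisely a
   signed overflow on the 32-bit addition. */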
367 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
368 {
369     TCGv r_temp;
370
371     r_temp = tcg_temp_new();
372     tcg_gen_xor_tl(r_temp, src1, src2);
373     tcg_gen_not_tl(r_temp, r_temp);
374     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
375     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
376     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
377     tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
378     tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
379     tcg_temp_free(r_temp);
380     tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
381 }
382
383 #ifdef TARGET_SPARC64
384 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
385 {
386     TCGv r_temp;
387
388     r_temp = tcg_temp_new();
389     tcg_gen_xor_tl(r_temp, src1, src2);
390     tcg_gen_not_tl(r_temp, r_temp);
391     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
392     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
393     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
394     tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
395     tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
396     tcg_temp_free(r_temp);
397     tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
398 }
399 #endif
400
401 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
402 {
403     TCGv r_temp;
404     TCGv_i32 r_const;
405     int l1;
406
407     l1 = gen_new_label();
408
409     r_temp = tcg_temp_new();
410     tcg_gen_xor_tl(r_temp, src1, src2);
411     tcg_gen_not_tl(r_temp, r_temp);
412     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
413     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
414     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
415     tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
416     r_const = tcg_const_i32(TT_TOVF);
417     gen_helper_raise_exception(r_const);
418     tcg_temp_free_i32(r_const);
419     gen_set_label(l1);
420     tcg_temp_free(r_temp);
421 }
422
423 static inline void gen_tag_tv(TCGv src1, TCGv src2)
424 {
425     int l1;
426     TCGv_i32 r_const;
427
428     l1 = gen_new_label();
429     tcg_gen_or_tl(cpu_tmp0, src1, src2);
430     tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
431     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
432     r_const = tcg_const_i32(TT_TOVF);
433     gen_helper_raise_exception(r_const);
434     tcg_temp_free_i32(r_const);
435     gen_set_label(l1);
436 }
437
438 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
439 {
440     tcg_gen_mov_tl(cpu_cc_src, src1);
441     tcg_gen_movi_tl(cpu_cc_src2, src2);
442     tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
443     tcg_gen_mov_tl(dst, cpu_cc_dst);
444 }
445
446 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
447 {
448     tcg_gen_mov_tl(cpu_cc_src, src1);
449     tcg_gen_mov_tl(cpu_cc_src2, src2);
450     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
451     tcg_gen_mov_tl(dst, cpu_cc_dst);
452 }
453
454 static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
455 {
456     tcg_gen_mov_tl(cpu_cc_src, src1);
457     tcg_gen_movi_tl(cpu_cc_src2, src2);
458     gen_mov_reg_C(cpu_tmp0, cpu_psr);
459     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
460     tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
461     tcg_gen_mov_tl(dst, cpu_cc_dst);
462 }
463
464 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
465 {
466     tcg_gen_mov_tl(cpu_cc_src, src1);
467     tcg_gen_mov_tl(cpu_cc_src2, src2);
468     gen_mov_reg_C(cpu_tmp0, cpu_psr);
469     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
470     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
471     tcg_gen_mov_tl(dst, cpu_cc_dst);
472 }
473
474 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
475 {
476     tcg_gen_mov_tl(cpu_cc_src, src1);
477     tcg_gen_mov_tl(cpu_cc_src2, src2);
478     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
479     tcg_gen_mov_tl(dst, cpu_cc_dst);
480 }
481
482 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
483 {
484     tcg_gen_mov_tl(cpu_cc_src, src1);
485     tcg_gen_mov_tl(cpu_cc_src2, src2);
486     gen_tag_tv(cpu_cc_src, cpu_cc_src2);
487     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
488     gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
489     tcg_gen_mov_tl(dst, cpu_cc_dst);
490 }
491
492 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
493 {
494     TCGv r_temp;
495     TCGv_i32 r_const;
496     int l1;
497
498     l1 = gen_new_label();
499
500     r_temp = tcg_temp_new();
501     tcg_gen_xor_tl(r_temp, src1, src2);
502     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
503     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
504     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
505     tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
506     r_const = tcg_const_i32(TT_TOVF);
507     gen_helper_raise_exception(r_const);
508     tcg_temp_free_i32(r_const);
509     gen_set_label(l1);
510     tcg_temp_free(r_temp);
511 }
512
513 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
514 {
515     tcg_gen_mov_tl(cpu_cc_src, src1);
516     tcg_gen_movi_tl(cpu_cc_src2, src2);
517     if (src2 == 0) {
518         tcg_gen_mov_tl(cpu_cc_dst, src1);
519         tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
520         dc->cc_op = CC_OP_LOGIC;
521     } else {
522         tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
523         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
524         dc->cc_op = CC_OP_SUB;
525     }
526     tcg_gen_mov_tl(dst, cpu_cc_dst);
527 }
528
529 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
530 {
531     tcg_gen_mov_tl(cpu_cc_src, src1);
532     tcg_gen_mov_tl(cpu_cc_src2, src2);
533     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
534     tcg_gen_mov_tl(dst, cpu_cc_dst);
535 }
536
537 static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
538 {
539     tcg_gen_mov_tl(cpu_cc_src, src1);
540     tcg_gen_movi_tl(cpu_cc_src2, src2);
541     gen_mov_reg_C(cpu_tmp0, cpu_psr);
542     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
543     tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
544     tcg_gen_mov_tl(dst, cpu_cc_dst);
545 }
546
547 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
548 {
549     tcg_gen_mov_tl(cpu_cc_src, src1);
550     tcg_gen_mov_tl(cpu_cc_src2, src2);
551     gen_mov_reg_C(cpu_tmp0, cpu_psr);
552     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
553     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
554     tcg_gen_mov_tl(dst, cpu_cc_dst);
555 }
556
557 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
558 {
559     tcg_gen_mov_tl(cpu_cc_src, src1);
560     tcg_gen_mov_tl(cpu_cc_src2, src2);
561     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
562     tcg_gen_mov_tl(dst, cpu_cc_dst);
563 }
564
565 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
566 {
567     tcg_gen_mov_tl(cpu_cc_src, src1);
568     tcg_gen_mov_tl(cpu_cc_src2, src2);
569     gen_tag_tv(cpu_cc_src, cpu_cc_src2);
570     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
571     gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
572     tcg_gen_mov_tl(dst, cpu_cc_dst);
573 }
574
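/* Added note: one step of the SPARC V8 multiply-step (MULScc) instruction.
   The addend in src2 is used only when the low bit of Y is set, Y is shifted
   right with the low bit of src1 moved into its top bit, and src1 is shifted
   right with (N xor V) shifted in before the addition updates the flags. */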
575 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
576 {
577     TCGv r_temp;
578     int l1;
579
580     l1 = gen_new_label();
581     r_temp = tcg_temp_new();
582
583     /* old op:
584     if (!(env->y & 1))
585         T1 = 0;
586     */
587     tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
588     tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
589     tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
590     tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
591     tcg_gen_movi_tl(cpu_cc_src2, 0);
592     gen_set_label(l1);
593
594     // b2 = T0 & 1;
595     // env->y = (b2 << 31) | (env->y >> 1);
596     tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
597     tcg_gen_shli_tl(r_temp, r_temp, 31);
598     tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
599     tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
600     tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
601     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
602
603     // b1 = N ^ V;
604     gen_mov_reg_N(cpu_tmp0, cpu_psr);
605     gen_mov_reg_V(r_temp, cpu_psr);
606     tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
607     tcg_temp_free(r_temp);
608
609     // T0 = (b1 << 31) | (T0 >> 1);
610     // src1 = T0;
611     tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
612     tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
613     tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
614
615     /* do addition and update flags */
616     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
617
618     gen_cc_clear_icc();
619     gen_cc_NZ_icc(cpu_cc_dst);
620     gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
621     gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
622     tcg_gen_mov_tl(dst, cpu_cc_dst);
623 }
624
625 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
626 {
627     TCGv_i64 r_temp, r_temp2;
628
629     r_temp = tcg_temp_new_i64();
630     r_temp2 = tcg_temp_new_i64();
631
632     tcg_gen_extu_tl_i64(r_temp, src2);
633     tcg_gen_extu_tl_i64(r_temp2, src1);
634     tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
635
636     tcg_gen_shri_i64(r_temp, r_temp2, 32);
637     tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
638     tcg_temp_free_i64(r_temp);
639     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
640 #ifdef TARGET_SPARC64
641     tcg_gen_mov_i64(dst, r_temp2);
642 #else
643     tcg_gen_trunc_i64_tl(dst, r_temp2);
644 #endif
645     tcg_temp_free_i64(r_temp2);
646 }
647
648 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
649 {
650     TCGv_i64 r_temp, r_temp2;
651
652     r_temp = tcg_temp_new_i64();
653     r_temp2 = tcg_temp_new_i64();
654
655     tcg_gen_ext_tl_i64(r_temp, src2);
656     tcg_gen_ext_tl_i64(r_temp2, src1);
657     tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
658
659     tcg_gen_shri_i64(r_temp, r_temp2, 32);
660     tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
661     tcg_temp_free_i64(r_temp);
662     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
663 #ifdef TARGET_SPARC64
664     tcg_gen_mov_i64(dst, r_temp2);
665 #else
666     tcg_gen_trunc_i64_tl(dst, r_temp2);
667 #endif
668     tcg_temp_free_i64(r_temp2);
669 }
670
671 #ifdef TARGET_SPARC64
672 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
673 {
674     TCGv_i32 r_const;
675     int l1;
676
677     l1 = gen_new_label();
678     tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
679     r_const = tcg_const_i32(TT_DIV_ZERO);
680     gen_helper_raise_exception(r_const);
681     tcg_temp_free_i32(r_const);
682     gen_set_label(l1);
683 }
684
685 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
686 {
687     int l1, l2;
688
689     l1 = gen_new_label();
690     l2 = gen_new_label();
691     tcg_gen_mov_tl(cpu_cc_src, src1);
692     tcg_gen_mov_tl(cpu_cc_src2, src2);
693     gen_trap_ifdivzero_tl(cpu_cc_src2);
694     tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
695     tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
696     tcg_gen_movi_i64(dst, INT64_MIN);
697     tcg_gen_br(l2);
698     gen_set_label(l1);
699     tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
700     gen_set_label(l2);
701 }
702 #endif
703
704 static inline void gen_op_div_cc(TCGv dst)
705 {
706     int l1;
707
708     tcg_gen_mov_tl(cpu_cc_dst, dst);
709     gen_cc_clear_icc();
710     gen_cc_NZ_icc(cpu_cc_dst);
711     l1 = gen_new_label();
712     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
713     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
714     gen_set_label(l1);
715 }
716
717 // 1
718 static inline void gen_op_eval_ba(TCGv dst)
719 {
720     tcg_gen_movi_tl(dst, 1);
721 }
722
723 // Z
724 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
725 {
726     gen_mov_reg_Z(dst, src);
727 }
728
729 // Z | (N ^ V)
730 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
731 {
732     gen_mov_reg_N(cpu_tmp0, src);
733     gen_mov_reg_V(dst, src);
734     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
735     gen_mov_reg_Z(cpu_tmp0, src);
736     tcg_gen_or_tl(dst, dst, cpu_tmp0);
737 }
738
739 // N ^ V
740 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
741 {
742     gen_mov_reg_V(cpu_tmp0, src);
743     gen_mov_reg_N(dst, src);
744     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
745 }
746
747 // C | Z
748 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
749 {
750     gen_mov_reg_Z(cpu_tmp0, src);
751     gen_mov_reg_C(dst, src);
752     tcg_gen_or_tl(dst, dst, cpu_tmp0);
753 }
754
755 // C
756 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
757 {
758     gen_mov_reg_C(dst, src);
759 }
760
761 // V
762 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
763 {
764     gen_mov_reg_V(dst, src);
765 }
766
767 // 0
768 static inline void gen_op_eval_bn(TCGv dst)
769 {
770     tcg_gen_movi_tl(dst, 0);
771 }
772
773 // N
774 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
775 {
776     gen_mov_reg_N(dst, src);
777 }
778
779 // !Z
780 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
781 {
782     gen_mov_reg_Z(dst, src);
783     tcg_gen_xori_tl(dst, dst, 0x1);
784 }
785
786 // !(Z | (N ^ V))
787 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
788 {
789     gen_mov_reg_N(cpu_tmp0, src);
790     gen_mov_reg_V(dst, src);
791     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
792     gen_mov_reg_Z(cpu_tmp0, src);
793     tcg_gen_or_tl(dst, dst, cpu_tmp0);
794     tcg_gen_xori_tl(dst, dst, 0x1);
795 }
796
797 // !(N ^ V)
798 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
799 {
800     gen_mov_reg_V(cpu_tmp0, src);
801     gen_mov_reg_N(dst, src);
802     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
803     tcg_gen_xori_tl(dst, dst, 0x1);
804 }
805
806 // !(C | Z)
807 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
808 {
809     gen_mov_reg_Z(cpu_tmp0, src);
810     gen_mov_reg_C(dst, src);
811     tcg_gen_or_tl(dst, dst, cpu_tmp0);
812     tcg_gen_xori_tl(dst, dst, 0x1);
813 }
814
815 // !C
816 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
817 {
818     gen_mov_reg_C(dst, src);
819     tcg_gen_xori_tl(dst, dst, 0x1);
820 }
821
822 // !N
823 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
824 {
825     gen_mov_reg_N(dst, src);
826     tcg_gen_xori_tl(dst, dst, 0x1);
827 }
828
829 // !V
830 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
831 {
832     gen_mov_reg_V(dst, src);
833     tcg_gen_xori_tl(dst, dst, 0x1);
834 }
835
836 /*
837   FPSR bit field FCC1 | FCC0:
838    0 =
839    1 <
840    2 >
841    3 unordered
842 */
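/* Added note: fcc_offset below shifts the FCC0/FCC1 bit positions so that the
   same helpers can test fcc0 (offset 0) or, on SPARC V9, fcc1..fcc3 (offsets
   22, 24 and 26, as chosen in gen_fcond()). */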
843 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
844                                     unsigned int fcc_offset)
845 {
846     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
847     tcg_gen_andi_tl(reg, reg, 0x1);
848 }
849
850 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
851                                     unsigned int fcc_offset)
852 {
853     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
854     tcg_gen_andi_tl(reg, reg, 0x1);
855 }
856
857 // !0: FCC0 | FCC1
858 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
859                                     unsigned int fcc_offset)
860 {
861     gen_mov_reg_FCC0(dst, src, fcc_offset);
862     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
863     tcg_gen_or_tl(dst, dst, cpu_tmp0);
864 }
865
866 // 1 or 2: FCC0 ^ FCC1
867 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
868                                     unsigned int fcc_offset)
869 {
870     gen_mov_reg_FCC0(dst, src, fcc_offset);
871     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
872     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
873 }
874
875 // 1 or 3: FCC0
876 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
877                                     unsigned int fcc_offset)
878 {
879     gen_mov_reg_FCC0(dst, src, fcc_offset);
880 }
881
882 // 1: FCC0 & !FCC1
883 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
884                                     unsigned int fcc_offset)
885 {
886     gen_mov_reg_FCC0(dst, src, fcc_offset);
887     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
888     tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
889     tcg_gen_and_tl(dst, dst, cpu_tmp0);
890 }
891
892 // 2 or 3: FCC1
893 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
894                                     unsigned int fcc_offset)
895 {
896     gen_mov_reg_FCC1(dst, src, fcc_offset);
897 }
898
899 // 2: !FCC0 & FCC1
900 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
901                                     unsigned int fcc_offset)
902 {
903     gen_mov_reg_FCC0(dst, src, fcc_offset);
904     tcg_gen_xori_tl(dst, dst, 0x1);
905     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
906     tcg_gen_and_tl(dst, dst, cpu_tmp0);
907 }
908
909 // 3: FCC0 & FCC1
910 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
911                                     unsigned int fcc_offset)
912 {
913     gen_mov_reg_FCC0(dst, src, fcc_offset);
914     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
915     tcg_gen_and_tl(dst, dst, cpu_tmp0);
916 }
917
918 // 0: !(FCC0 | FCC1)
919 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
920                                     unsigned int fcc_offset)
921 {
922     gen_mov_reg_FCC0(dst, src, fcc_offset);
923     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
924     tcg_gen_or_tl(dst, dst, cpu_tmp0);
925     tcg_gen_xori_tl(dst, dst, 0x1);
926 }
927
928 // 0 or 3: !(FCC0 ^ FCC1)
929 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
930                                     unsigned int fcc_offset)
931 {
932     gen_mov_reg_FCC0(dst, src, fcc_offset);
933     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
934     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
935     tcg_gen_xori_tl(dst, dst, 0x1);
936 }
937
938 // 0 or 2: !FCC0
939 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
940                                     unsigned int fcc_offset)
941 {
942     gen_mov_reg_FCC0(dst, src, fcc_offset);
943     tcg_gen_xori_tl(dst, dst, 0x1);
944 }
945
946 // !1: !(FCC0 & !FCC1)
947 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
948                                     unsigned int fcc_offset)
949 {
950     gen_mov_reg_FCC0(dst, src, fcc_offset);
951     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
952     tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
953     tcg_gen_and_tl(dst, dst, cpu_tmp0);
954     tcg_gen_xori_tl(dst, dst, 0x1);
955 }
956
957 // 0 or 1: !FCC1
958 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
959                                     unsigned int fcc_offset)
960 {
961     gen_mov_reg_FCC1(dst, src, fcc_offset);
962     tcg_gen_xori_tl(dst, dst, 0x1);
963 }
964
965 // !2: !(!FCC0 & FCC1)
966 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
967                                     unsigned int fcc_offset)
968 {
969     gen_mov_reg_FCC0(dst, src, fcc_offset);
970     tcg_gen_xori_tl(dst, dst, 0x1);
971     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
972     tcg_gen_and_tl(dst, dst, cpu_tmp0);
973     tcg_gen_xori_tl(dst, dst, 0x1);
974 }
975
976 // !3: !(FCC0 & FCC1)
977 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
978                                     unsigned int fcc_offset)
979 {
980     gen_mov_reg_FCC0(dst, src, fcc_offset);
981     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
982     tcg_gen_and_tl(dst, dst, cpu_tmp0);
983     tcg_gen_xori_tl(dst, dst, 0x1);
984 }
985
986 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
987                                target_ulong pc2, TCGv r_cond)
988 {
989     int l1;
990
991     l1 = gen_new_label();
992
993     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
994
995     gen_goto_tb(dc, 0, pc1, pc1 + 4);
996
997     gen_set_label(l1);
998     gen_goto_tb(dc, 1, pc2, pc2 + 4);
999 }
1000
1001 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1002                                 target_ulong pc2, TCGv r_cond)
1003 {
1004     int l1;
1005
1006     l1 = gen_new_label();
1007
1008     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1009
1010     gen_goto_tb(dc, 0, pc2, pc1);
1011
1012     gen_set_label(l1);
1013     gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1014 }
1015
1016 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1017                                       TCGv r_cond)
1018 {
1019     int l1, l2;
1020
1021     l1 = gen_new_label();
1022     l2 = gen_new_label();
1023
1024     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1025
1026     tcg_gen_movi_tl(cpu_npc, npc1);
1027     tcg_gen_br(l2);
1028
1029     gen_set_label(l1);
1030     tcg_gen_movi_tl(cpu_npc, npc2);
1031     gen_set_label(l2);
1032 }
1033
1034 /* call this function before reusing the condition register, as it may
1035    still hold a pending JUMP_PC branch condition that must be resolved */
1036 static inline void flush_cond(DisasContext *dc, TCGv cond)
1037 {
1038     if (dc->npc == JUMP_PC) {
1039         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1040         dc->npc = DYNAMIC_PC;
1041     }
1042 }
1043
1044 static inline void save_npc(DisasContext *dc, TCGv cond)
1045 {
1046     if (dc->npc == JUMP_PC) {
1047         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1048         dc->npc = DYNAMIC_PC;
1049     } else if (dc->npc != DYNAMIC_PC) {
1050         tcg_gen_movi_tl(cpu_npc, dc->npc);
1051     }
1052 }
1053
1054 static inline void save_state(DisasContext *dc, TCGv cond)
1055 {
1056     tcg_gen_movi_tl(cpu_pc, dc->pc);
1057     save_npc(dc, cond);
1058 }
1059
1060 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1061 {
1062     if (dc->npc == JUMP_PC) {
1063         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1064         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1065         dc->pc = DYNAMIC_PC;
1066     } else if (dc->npc == DYNAMIC_PC) {
1067         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1068         dc->pc = DYNAMIC_PC;
1069     } else {
1070         dc->pc = dc->npc;
1071     }
1072 }
1073
1074 static inline void gen_op_next_insn(void)
1075 {
1076     tcg_gen_mov_tl(cpu_pc, cpu_npc);
1077     tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1078 }
1079
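/* Added note: condition codes are evaluated lazily.  If cpu_psr does not yet
   hold the architectural flags (dc->cc_op != CC_OP_FLAGS), gen_cond() first
   calls the compute_psr helper to materialize them before decoding the
   branch condition. */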
1080 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1081                             DisasContext *dc)
1082 {
1083     TCGv_i32 r_src;
1084
1085 #ifdef TARGET_SPARC64
1086     if (cc)
1087         r_src = cpu_xcc;
1088     else
1089         r_src = cpu_psr;
1090 #else
1091     r_src = cpu_psr;
1092 #endif
1093     switch (dc->cc_op) {
1094     case CC_OP_FLAGS:
1095         break;
1096     default:
1097         gen_helper_compute_psr();
1098         dc->cc_op = CC_OP_FLAGS;
1099         break;
1100     }
1101     switch (cond) {
1102     case 0x0:
1103         gen_op_eval_bn(r_dst);
1104         break;
1105     case 0x1:
1106         gen_op_eval_be(r_dst, r_src);
1107         break;
1108     case 0x2:
1109         gen_op_eval_ble(r_dst, r_src);
1110         break;
1111     case 0x3:
1112         gen_op_eval_bl(r_dst, r_src);
1113         break;
1114     case 0x4:
1115         gen_op_eval_bleu(r_dst, r_src);
1116         break;
1117     case 0x5:
1118         gen_op_eval_bcs(r_dst, r_src);
1119         break;
1120     case 0x6:
1121         gen_op_eval_bneg(r_dst, r_src);
1122         break;
1123     case 0x7:
1124         gen_op_eval_bvs(r_dst, r_src);
1125         break;
1126     case 0x8:
1127         gen_op_eval_ba(r_dst);
1128         break;
1129     case 0x9:
1130         gen_op_eval_bne(r_dst, r_src);
1131         break;
1132     case 0xa:
1133         gen_op_eval_bg(r_dst, r_src);
1134         break;
1135     case 0xb:
1136         gen_op_eval_bge(r_dst, r_src);
1137         break;
1138     case 0xc:
1139         gen_op_eval_bgu(r_dst, r_src);
1140         break;
1141     case 0xd:
1142         gen_op_eval_bcc(r_dst, r_src);
1143         break;
1144     case 0xe:
1145         gen_op_eval_bpos(r_dst, r_src);
1146         break;
1147     case 0xf:
1148         gen_op_eval_bvc(r_dst, r_src);
1149         break;
1150     }
1151 }
1152
1153 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1154 {
1155     unsigned int offset;
1156
1157     switch (cc) {
1158     default:
1159     case 0x0:
1160         offset = 0;
1161         break;
1162     case 0x1:
1163         offset = 32 - 10;
1164         break;
1165     case 0x2:
1166         offset = 34 - 10;
1167         break;
1168     case 0x3:
1169         offset = 36 - 10;
1170         break;
1171     }
1172
1173     switch (cond) {
1174     case 0x0:
1175         gen_op_eval_bn(r_dst);
1176         break;
1177     case 0x1:
1178         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1179         break;
1180     case 0x2:
1181         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1182         break;
1183     case 0x3:
1184         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1185         break;
1186     case 0x4:
1187         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1188         break;
1189     case 0x5:
1190         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1191         break;
1192     case 0x6:
1193         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1194         break;
1195     case 0x7:
1196         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1197         break;
1198     case 0x8:
1199         gen_op_eval_ba(r_dst);
1200         break;
1201     case 0x9:
1202         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1203         break;
1204     case 0xa:
1205         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1206         break;
1207     case 0xb:
1208         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1209         break;
1210     case 0xc:
1211         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1212         break;
1213     case 0xd:
1214         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1215         break;
1216     case 0xe:
1217         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1218         break;
1219     case 0xf:
1220         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1221         break;
1222     }
1223 }
1224
1225 #ifdef TARGET_SPARC64
1226 // Inverted logic: entries hold the negated condition; gen_cond_reg()
     // skips setting r_dst to 1 when the negated test succeeds.
1227 static const int gen_tcg_cond_reg[8] = {
1228     -1,
1229     TCG_COND_NE,
1230     TCG_COND_GT,
1231     TCG_COND_GE,
1232     -1,
1233     TCG_COND_EQ,
1234     TCG_COND_LE,
1235     TCG_COND_LT,
1236 };
1237
1238 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1239 {
1240     int l1;
1241
1242     l1 = gen_new_label();
1243     tcg_gen_movi_tl(r_dst, 0);
1244     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1245     tcg_gen_movi_tl(r_dst, 1);
1246     gen_set_label(l1);
1247 }
1248 #endif
1249
1250 /* XXX: potentially incorrect if dynamic npc */
1251 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1252                       TCGv r_cond)
1253 {
1254     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1255     target_ulong target = dc->pc + offset;
1256
1257     if (cond == 0x0) {
1258         /* unconditional not taken */
1259         if (a) {
1260             dc->pc = dc->npc + 4;
1261             dc->npc = dc->pc + 4;
1262         } else {
1263             dc->pc = dc->npc;
1264             dc->npc = dc->pc + 4;
1265         }
1266     } else if (cond == 0x8) {
1267         /* unconditional taken */
1268         if (a) {
1269             dc->pc = target;
1270             dc->npc = dc->pc + 4;
1271         } else {
1272             dc->pc = dc->npc;
1273             dc->npc = target;
1274         }
1275     } else {
1276         flush_cond(dc, r_cond);
1277         gen_cond(r_cond, cc, cond, dc);
1278         if (a) {
1279             gen_branch_a(dc, target, dc->npc, r_cond);
1280             dc->is_br = 1;
1281         } else {
1282             dc->pc = dc->npc;
1283             dc->jump_pc[0] = target;
1284             dc->jump_pc[1] = dc->npc + 4;
1285             dc->npc = JUMP_PC;
1286         }
1287     }
1288 }
1289
1290 /* XXX: potentially incorrect if dynamic npc */
1291 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1292                       TCGv r_cond)
1293 {
1294     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1295     target_ulong target = dc->pc + offset;
1296
1297     if (cond == 0x0) {
1298         /* unconditional not taken */
1299         if (a) {
1300             dc->pc = dc->npc + 4;
1301             dc->npc = dc->pc + 4;
1302         } else {
1303             dc->pc = dc->npc;
1304             dc->npc = dc->pc + 4;
1305         }
1306     } else if (cond == 0x8) {
1307         /* unconditional taken */
1308         if (a) {
1309             dc->pc = target;
1310             dc->npc = dc->pc + 4;
1311         } else {
1312             dc->pc = dc->npc;
1313             dc->npc = target;
1314         }
1315     } else {
1316         flush_cond(dc, r_cond);
1317         gen_fcond(r_cond, cc, cond);
1318         if (a) {
1319             gen_branch_a(dc, target, dc->npc, r_cond);
1320             dc->is_br = 1;
1321         } else {
1322             dc->pc = dc->npc;
1323             dc->jump_pc[0] = target;
1324             dc->jump_pc[1] = dc->npc + 4;
1325             dc->npc = JUMP_PC;
1326         }
1327     }
1328 }
1329
1330 #ifdef TARGET_SPARC64
1331 /* XXX: potentially incorrect if dynamic npc */
1332 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1333                           TCGv r_cond, TCGv r_reg)
1334 {
1335     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1336     target_ulong target = dc->pc + offset;
1337
1338     flush_cond(dc, r_cond);
1339     gen_cond_reg(r_cond, cond, r_reg);
1340     if (a) {
1341         gen_branch_a(dc, target, dc->npc, r_cond);
1342         dc->is_br = 1;
1343     } else {
1344         dc->pc = dc->npc;
1345         dc->jump_pc[0] = target;
1346         dc->jump_pc[1] = dc->npc + 4;
1347         dc->npc = JUMP_PC;
1348     }
1349 }
1350
1351 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1352 {
1353     switch (fccno) {
1354     case 0:
1355         gen_helper_fcmps(r_rs1, r_rs2);
1356         break;
1357     case 1:
1358         gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1359         break;
1360     case 2:
1361         gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1362         break;
1363     case 3:
1364         gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1365         break;
1366     }
1367 }
1368
1369 static inline void gen_op_fcmpd(int fccno)
1370 {
1371     switch (fccno) {
1372     case 0:
1373         gen_helper_fcmpd();
1374         break;
1375     case 1:
1376         gen_helper_fcmpd_fcc1();
1377         break;
1378     case 2:
1379         gen_helper_fcmpd_fcc2();
1380         break;
1381     case 3:
1382         gen_helper_fcmpd_fcc3();
1383         break;
1384     }
1385 }
1386
1387 static inline void gen_op_fcmpq(int fccno)
1388 {
1389     switch (fccno) {
1390     case 0:
1391         gen_helper_fcmpq();
1392         break;
1393     case 1:
1394         gen_helper_fcmpq_fcc1();
1395         break;
1396     case 2:
1397         gen_helper_fcmpq_fcc2();
1398         break;
1399     case 3:
1400         gen_helper_fcmpq_fcc3();
1401         break;
1402     }
1403 }
1404
1405 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1406 {
1407     switch (fccno) {
1408     case 0:
1409         gen_helper_fcmpes(r_rs1, r_rs2);
1410         break;
1411     case 1:
1412         gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1413         break;
1414     case 2:
1415         gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1416         break;
1417     case 3:
1418         gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1419         break;
1420     }
1421 }
1422
1423 static inline void gen_op_fcmped(int fccno)
1424 {
1425     switch (fccno) {
1426     case 0:
1427         gen_helper_fcmped();
1428         break;
1429     case 1:
1430         gen_helper_fcmped_fcc1();
1431         break;
1432     case 2:
1433         gen_helper_fcmped_fcc2();
1434         break;
1435     case 3:
1436         gen_helper_fcmped_fcc3();
1437         break;
1438     }
1439 }
1440
1441 static inline void gen_op_fcmpeq(int fccno)
1442 {
1443     switch (fccno) {
1444     case 0:
1445         gen_helper_fcmpeq();
1446         break;
1447     case 1:
1448         gen_helper_fcmpeq_fcc1();
1449         break;
1450     case 2:
1451         gen_helper_fcmpeq_fcc2();
1452         break;
1453     case 3:
1454         gen_helper_fcmpeq_fcc3();
1455         break;
1456     }
1457 }
1458
1459 #else
1460
1461 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1462 {
1463     gen_helper_fcmps(r_rs1, r_rs2);
1464 }
1465
1466 static inline void gen_op_fcmpd(int fccno)
1467 {
1468     gen_helper_fcmpd();
1469 }
1470
1471 static inline void gen_op_fcmpq(int fccno)
1472 {
1473     gen_helper_fcmpq();
1474 }
1475
1476 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1477 {
1478     gen_helper_fcmpes(r_rs1, r_rs2);
1479 }
1480
1481 static inline void gen_op_fcmped(int fccno)
1482 {
1483     gen_helper_fcmped();
1484 }
1485
1486 static inline void gen_op_fcmpeq(int fccno)
1487 {
1488     gen_helper_fcmpeq();
1489 }
1490 #endif
1491
1492 static inline void gen_op_fpexception_im(int fsr_flags)
1493 {
1494     TCGv_i32 r_const;
1495
1496     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1497     tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1498     r_const = tcg_const_i32(TT_FP_EXCP);
1499     gen_helper_raise_exception(r_const);
1500     tcg_temp_free_i32(r_const);
1501 }
1502
1503 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1504 {
1505 #if !defined(CONFIG_USER_ONLY)
1506     if (!dc->fpu_enabled) {
1507         TCGv_i32 r_const;
1508
1509         save_state(dc, r_cond);
1510         r_const = tcg_const_i32(TT_NFPU_INSN);
1511         gen_helper_raise_exception(r_const);
1512         tcg_temp_free_i32(r_const);
1513         dc->is_br = 1;
1514         return 1;
1515     }
1516 #endif
1517     return 0;
1518 }
1519
1520 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1521 {
1522     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1523 }
1524
1525 static inline void gen_clear_float_exceptions(void)
1526 {
1527     gen_helper_clear_float_exceptions();
1528 }
1529
1530 /* asi moves */
1531 #ifdef TARGET_SPARC64
1532 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1533 {
1534     int asi;
1535     TCGv_i32 r_asi;
1536
1537     if (IS_IMM) {
1538         r_asi = tcg_temp_new_i32();
1539         tcg_gen_mov_i32(r_asi, cpu_asi);
1540     } else {
1541         asi = GET_FIELD(insn, 19, 26);
1542         r_asi = tcg_const_i32(asi);
1543     }
1544     return r_asi;
1545 }
1546
1547 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1548                               int sign)
1549 {
1550     TCGv_i32 r_asi, r_size, r_sign;
1551
1552     r_asi = gen_get_asi(insn, addr);
1553     r_size = tcg_const_i32(size);
1554     r_sign = tcg_const_i32(sign);
1555     gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1556     tcg_temp_free_i32(r_sign);
1557     tcg_temp_free_i32(r_size);
1558     tcg_temp_free_i32(r_asi);
1559 }
1560
1561 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1562 {
1563     TCGv_i32 r_asi, r_size;
1564
1565     r_asi = gen_get_asi(insn, addr);
1566     r_size = tcg_const_i32(size);
1567     gen_helper_st_asi(addr, src, r_asi, r_size);
1568     tcg_temp_free_i32(r_size);
1569     tcg_temp_free_i32(r_asi);
1570 }
1571
1572 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1573 {
1574     TCGv_i32 r_asi, r_size, r_rd;
1575
1576     r_asi = gen_get_asi(insn, addr);
1577     r_size = tcg_const_i32(size);
1578     r_rd = tcg_const_i32(rd);
1579     gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1580     tcg_temp_free_i32(r_rd);
1581     tcg_temp_free_i32(r_size);
1582     tcg_temp_free_i32(r_asi);
1583 }
1584
1585 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1586 {
1587     TCGv_i32 r_asi, r_size, r_rd;
1588
1589     r_asi = gen_get_asi(insn, addr);
1590     r_size = tcg_const_i32(size);
1591     r_rd = tcg_const_i32(rd);
1592     gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1593     tcg_temp_free_i32(r_rd);
1594     tcg_temp_free_i32(r_size);
1595     tcg_temp_free_i32(r_asi);
1596 }
1597
1598 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1599 {
1600     TCGv_i32 r_asi, r_size, r_sign;
1601
1602     r_asi = gen_get_asi(insn, addr);
1603     r_size = tcg_const_i32(4);
1604     r_sign = tcg_const_i32(0);
1605     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1606     tcg_temp_free_i32(r_sign);
1607     gen_helper_st_asi(addr, dst, r_asi, r_size);
1608     tcg_temp_free_i32(r_size);
1609     tcg_temp_free_i32(r_asi);
1610     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1611 }
1612
1613 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1614 {
1615     TCGv_i32 r_asi, r_rd;
1616
1617     r_asi = gen_get_asi(insn, addr);
1618     r_rd = tcg_const_i32(rd);
1619     gen_helper_ldda_asi(addr, r_asi, r_rd);
1620     tcg_temp_free_i32(r_rd);
1621     tcg_temp_free_i32(r_asi);
1622 }
1623
1624 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1625 {
1626     TCGv_i32 r_asi, r_size;
1627
1628     gen_movl_reg_TN(rd + 1, cpu_tmp0);
1629     tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1630     r_asi = gen_get_asi(insn, addr);
1631     r_size = tcg_const_i32(8);
1632     gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1633     tcg_temp_free_i32(r_size);
1634     tcg_temp_free_i32(r_asi);
1635 }
1636
1637 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1638                                int rd)
1639 {
1640     TCGv r_val1;
1641     TCGv_i32 r_asi;
1642
1643     r_val1 = tcg_temp_new();
1644     gen_movl_reg_TN(rd, r_val1);
1645     r_asi = gen_get_asi(insn, addr);
1646     gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1647     tcg_temp_free_i32(r_asi);
1648     tcg_temp_free(r_val1);
1649 }
1650
1651 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1652                                 int rd)
1653 {
1654     TCGv_i32 r_asi;
1655
1656     gen_movl_reg_TN(rd, cpu_tmp64);
1657     r_asi = gen_get_asi(insn, addr);
1658     gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1659     tcg_temp_free_i32(r_asi);
1660 }
1661
1662 #elif !defined(CONFIG_USER_ONLY)
1663
1664 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1665                               int sign)
1666 {
1667     TCGv_i32 r_asi, r_size, r_sign;
1668
1669     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1670     r_size = tcg_const_i32(size);
1671     r_sign = tcg_const_i32(sign);
1672     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1673     tcg_temp_free(r_sign);
1674     tcg_temp_free(r_size);
1675     tcg_temp_free(r_asi);
1676     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1677 }
1678
1679 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1680 {
1681     TCGv_i32 r_asi, r_size;
1682
1683     tcg_gen_extu_tl_i64(cpu_tmp64, src);
1684     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1685     r_size = tcg_const_i32(size);
1686     gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1687     tcg_temp_free(r_size);
1688     tcg_temp_free(r_asi);
1689 }
1690
1691 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1692 {
1693     TCGv_i32 r_asi, r_size, r_sign;
1694     TCGv_i64 r_val;
1695
1696     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1697     r_size = tcg_const_i32(4);
1698     r_sign = tcg_const_i32(0);
1699     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1700     tcg_temp_free(r_sign);
1701     r_val = tcg_temp_new_i64();
1702     tcg_gen_extu_tl_i64(r_val, dst);
1703     gen_helper_st_asi(addr, r_val, r_asi, r_size);
1704     tcg_temp_free_i64(r_val);
1705     tcg_temp_free(r_size);
1706     tcg_temp_free(r_asi);
1707     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1708 }
1709
1710 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1711 {
1712     TCGv_i32 r_asi, r_size, r_sign;
1713
1714     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1715     r_size = tcg_const_i32(8);
1716     r_sign = tcg_const_i32(0);
1717     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1718     tcg_temp_free(r_sign);
1719     tcg_temp_free(r_size);
1720     tcg_temp_free(r_asi);
1721     tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1722     gen_movl_TN_reg(rd + 1, cpu_tmp0);
1723     tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1724     tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1725     gen_movl_TN_reg(rd, hi);
1726 }
1727
1728 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1729 {
1730     TCGv_i32 r_asi, r_size;
1731
1732     gen_movl_reg_TN(rd + 1, cpu_tmp0);
1733     tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1734     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1735     r_size = tcg_const_i32(8);
1736     gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1737     tcg_temp_free(r_size);
1738     tcg_temp_free(r_asi);
1739 }
1740 #endif
1741
1742 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1743 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1744 {
1745     TCGv_i64 r_val;
1746     TCGv_i32 r_asi, r_size;
1747
1748     gen_ld_asi(dst, addr, insn, 1, 0);
1749
1750     r_val = tcg_const_i64(0xffULL);
1751     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1752     r_size = tcg_const_i32(1);
1753     gen_helper_st_asi(addr, r_val, r_asi, r_size);
1754     tcg_temp_free_i32(r_size);
1755     tcg_temp_free_i32(r_asi);
1756     tcg_temp_free_i64(r_val);
1757 }
1758 #endif
1759
1760 static inline TCGv get_src1(unsigned int insn, TCGv def)
1761 {
1762     TCGv r_rs1 = def;
1763     unsigned int rs1;
1764
1765     rs1 = GET_FIELD(insn, 13, 17);
1766     if (rs1 == 0)
1767         r_rs1 = tcg_const_tl(0); // XXX how to free?
1768     else if (rs1 < 8)
1769         r_rs1 = cpu_gregs[rs1];
1770     else
1771         tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1772     return r_rs1;
1773 }
1774
1775 static inline TCGv get_src2(unsigned int insn, TCGv def)
1776 {
1777     TCGv r_rs2 = def;
1778
1779     if (IS_IMM) { /* immediate */
1780         target_long simm;
1781
1782         simm = GET_FIELDs(insn, 19, 31);
1783         r_rs2 = tcg_const_tl(simm); // XXX how to free?
1784     } else { /* register */
1785         unsigned int rs2;
1786
1787         rs2 = GET_FIELD(insn, 27, 31);
1788         if (rs2 == 0)
1789             r_rs2 = tcg_const_tl(0); // XXX how to free?
1790         else if (rs2 < 8)
1791             r_rs2 = cpu_gregs[rs2];
1792         else
1793             tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1794     }
1795     return r_rs2;
1796 }
1797
1798 #define CHECK_IU_FEATURE(dc, FEATURE)                      \
1799     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
1800         goto illegal_insn;
1801 #define CHECK_FPU_FEATURE(dc, FEATURE)                     \
1802     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
1803         goto nfpu_insn;
1804
1805 /* before an instruction, dc->pc must be static */
1806 static void disas_sparc_insn(DisasContext * dc)
1807 {
1808     unsigned int insn, opc, rs1, rs2, rd;
1809     target_long simm;
1810
1811     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1812         tcg_gen_debug_insn_start(dc->pc);
1813     insn = ldl_code(dc->pc);
1814     opc = GET_FIELD(insn, 0, 1);
1815
1816     rd = GET_FIELD(insn, 2, 6);
1817
1818     cpu_src1 = tcg_temp_new(); // const
1819     cpu_src2 = tcg_temp_new(); // const
1820
1821     switch (opc) {
1822     case 0:                     /* branches/sethi */
1823         {
1824             unsigned int xop = GET_FIELD(insn, 7, 9);
1825             int32_t target;
1826             switch (xop) {
1827 #ifdef TARGET_SPARC64
1828             case 0x1:           /* V9 BPcc */
1829                 {
1830                     int cc;
1831
1832                     target = GET_FIELD_SP(insn, 0, 18);
1833                     target = sign_extend(target, 19);
1834                     target <<= 2;
1835                     cc = GET_FIELD_SP(insn, 20, 21);
1836                     if (cc == 0)
1837                         do_branch(dc, target, insn, 0, cpu_cond);
1838                     else if (cc == 2)
1839                         do_branch(dc, target, insn, 1, cpu_cond);
1840                     else
1841                         goto illegal_insn;
1842                     goto jmp_insn;
1843                 }
1844             case 0x3:           /* V9 BPr */
1845                 {
1846                     target = GET_FIELD_SP(insn, 0, 13) |
1847                         (GET_FIELD_SP(insn, 20, 21) << 14);
1848                     target = sign_extend(target, 16);
1849                     target <<= 2;
1850                     cpu_src1 = get_src1(insn, cpu_src1);
1851                     do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1852                     goto jmp_insn;
1853                 }
1854             case 0x5:           /* V9 FBPcc */
1855                 {
1856                     int cc = GET_FIELD_SP(insn, 20, 21);
1857                     if (gen_trap_ifnofpu(dc, cpu_cond))
1858                         goto jmp_insn;
1859                     target = GET_FIELD_SP(insn, 0, 18);
1860                     target = sign_extend(target, 19);
1861                     target <<= 2;
1862                     do_fbranch(dc, target, insn, cc, cpu_cond);
1863                     goto jmp_insn;
1864                 }
1865 #else
1866             case 0x7:           /* CBN+x */
1867                 {
1868                     goto ncp_insn;
1869                 }
1870 #endif
1871             case 0x2:           /* BN+x */
1872                 {
1873                     target = GET_FIELD(insn, 10, 31);
1874                     target = sign_extend(target, 22);
1875                     target <<= 2;
1876                     do_branch(dc, target, insn, 0, cpu_cond);
1877                     goto jmp_insn;
1878                 }
1879             case 0x6:           /* FBN+x */
1880                 {
1881                     if (gen_trap_ifnofpu(dc, cpu_cond))
1882                         goto jmp_insn;
1883                     target = GET_FIELD(insn, 10, 31);
1884                     target = sign_extend(target, 22);
1885                     target <<= 2;
1886                     do_fbranch(dc, target, insn, 0, cpu_cond);
1887                     goto jmp_insn;
1888                 }
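                 /*
                  * SETHI places its 22-bit immediate in the upper 22 bits of rd
                  * and clears the low 10 bits; with rd == %g0 it degenerates to
                  * a nop.  For illustration, "sethi %hi(0x12345678), %o0" would
                  * leave 0x12345400 in %o0.
                  */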
1889             case 0x4:           /* SETHI */
1890                 if (rd) { /* a nop when rd is %g0 */
1891                     uint32_t value = GET_FIELD(insn, 10, 31);
1892                     TCGv r_const;
1893
1894                     r_const = tcg_const_tl(value << 10);
1895                     gen_movl_TN_reg(rd, r_const);
1896                     tcg_temp_free(r_const);
1897                 }
1898                 break;
1899             case 0x0:           /* UNIMPL */
1900             default:
1901                 goto illegal_insn;
1902             }
1903             break;
1904         }
1905         break;
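         /*
          * CALL stores the address of the call itself in %o7 (register 15) and
          * jumps to PC plus the sign-extended 30-bit word displacement shifted
          * left by two.  For illustration, the word 0x40000004 (op = 1,
          * disp30 = 4) would transfer control to PC + 16.
          */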
1906     case 1:                     /* CALL */
1907         {
1908             target_long target = GET_FIELDs(insn, 2, 31) << 2;
1909             TCGv r_const;
1910
1911             r_const = tcg_const_tl(dc->pc);
1912             gen_movl_TN_reg(15, r_const);
1913             tcg_temp_free(r_const);
1914             target += dc->pc;
1915             gen_mov_pc_npc(dc, cpu_cond);
1916             dc->npc = target;
1917         }
1918         goto jmp_insn;
1919     case 2:                     /* FPU & Logical Operations */
1920         {
1921             unsigned int xop = GET_FIELD(insn, 7, 12);
1922             if (xop == 0x3a) {  /* generate trap */
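                      /*
                       * Ticc: the trap number is rs1 plus either the 7-bit
                       * immediate or rs2.  It is masked (UA2005_HTRAP_MASK for a
                       * hypervisor-capable supervisor, V8_TRAP_MASK otherwise),
                       * biased by TT_TRAP and raised as an exception.  cond ==
                       * 0x8 is the unconditional "ta" form; other conditions
                       * branch around the trap when the condition codes do not
                       * match.
                       */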
1923                 int cond;
1924
1925                 cpu_src1 = get_src1(insn, cpu_src1);
1926                 if (IS_IMM) {
1927                     rs2 = GET_FIELD(insn, 25, 31);
1928                     tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
1929                 } else {
1930                     rs2 = GET_FIELD(insn, 27, 31);
1931                     if (rs2 != 0) {
1932                         gen_movl_reg_TN(rs2, cpu_src2);
1933                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
1934                     } else
1935                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
1936                 }
1937                 cond = GET_FIELD(insn, 3, 6);
1938                 if (cond == 0x8) {
1939                     save_state(dc, cpu_cond);
1940                     if ((dc->def->features & CPU_FEATURE_HYPV) &&
1941                         supervisor(dc))
1942                         tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1943                     else
1944                         tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1945                     tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1946                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1947                     gen_helper_raise_exception(cpu_tmp32);
1948                 } else if (cond != 0) {
1949                     TCGv r_cond = tcg_temp_new();
1950                     int l1;
1951 #ifdef TARGET_SPARC64
1952                     /* V9 icc/xcc */
1953                     int cc = GET_FIELD_SP(insn, 11, 12);
1954
1955                     save_state(dc, cpu_cond);
1956                     if (cc == 0)
1957                         gen_cond(r_cond, 0, cond, dc);
1958                     else if (cc == 2)
1959                         gen_cond(r_cond, 1, cond, dc);
1960                     else
1961                         goto illegal_insn;
1962 #else
1963                     save_state(dc, cpu_cond);
1964                     gen_cond(r_cond, 0, cond, dc);
1965 #endif
1966                     l1 = gen_new_label();
1967                     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1968
1969                     if ((dc->def->features & CPU_FEATURE_HYPV) &&
1970                         supervisor(dc))
1971                         tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1972                     else
1973                         tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1974                     tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1975                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1976                     gen_helper_raise_exception(cpu_tmp32);
1977
1978                     gen_set_label(l1);
1979                     tcg_temp_free(r_cond);
1980                 }
1981                 gen_op_next_insn();
1982                 tcg_gen_exit_tb(0);
1983                 dc->is_br = 1;
1984                 goto jmp_insn;
1985             } else if (xop == 0x28) {
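                      /*
                       * rd %y / rd %asr: rs1 selects the ancillary state
                       * register.  On 32-bit SPARC most encodings fall back to
                       * reading %y (matching the microSPARC II behaviour noted
                       * below); the V9-only cases read %ccr, %asi, %tick, %pc,
                       * %fprs, %gsr, %softint and the tick compare registers.
                       */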
1986                 rs1 = GET_FIELD(insn, 13, 17);
1987                 switch(rs1) {
1988                 case 0: /* rdy */
1989 #ifndef TARGET_SPARC64
1990                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1991                                        manual, rdy on the microSPARC
1992                                        II */
1993                 case 0x0f:          /* stbar in the SPARCv8 manual,
1994                                        rdy on the microSPARC II */
1995                 case 0x10 ... 0x1f: /* implementation-dependent in the
1996                                        SPARCv8 manual, rdy on the
1997                                        microSPARC II */
1998 #endif
1999                     gen_movl_TN_reg(rd, cpu_y);
2000                     break;
2001 #ifdef TARGET_SPARC64
2002                 case 0x2: /* V9 rdccr */
2003                     gen_helper_compute_psr();
2004                     gen_helper_rdccr(cpu_dst);
2005                     gen_movl_TN_reg(rd, cpu_dst);
2006                     break;
2007                 case 0x3: /* V9 rdasi */
2008                     tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2009                     gen_movl_TN_reg(rd, cpu_dst);
2010                     break;
2011                 case 0x4: /* V9 rdtick */
2012                     {
2013                         TCGv_ptr r_tickptr;
2014
2015                         r_tickptr = tcg_temp_new_ptr();
2016                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2017                                        offsetof(CPUState, tick));
2018                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2019                         tcg_temp_free_ptr(r_tickptr);
2020                         gen_movl_TN_reg(rd, cpu_dst);
2021                     }
2022                     break;
2023                 case 0x5: /* V9 rdpc */
2024                     {
2025                         TCGv r_const;
2026
2027                         r_const = tcg_const_tl(dc->pc);
2028                         gen_movl_TN_reg(rd, r_const);
2029                         tcg_temp_free(r_const);
2030                     }
2031                     break;
2032                 case 0x6: /* V9 rdfprs */
2033                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2034                     gen_movl_TN_reg(rd, cpu_dst);
2035                     break;
2036                 case 0xf: /* V9 membar */
2037                     break; /* no effect */
2038                 case 0x13: /* Graphics Status */
2039                     if (gen_trap_ifnofpu(dc, cpu_cond))
2040                         goto jmp_insn;
2041                     gen_movl_TN_reg(rd, cpu_gsr);
2042                     break;
2043                 case 0x16: /* Softint */
2044                     tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2045                     gen_movl_TN_reg(rd, cpu_dst);
2046                     break;
2047                 case 0x17: /* Tick compare */
2048                     gen_movl_TN_reg(rd, cpu_tick_cmpr);
2049                     break;
2050                 case 0x18: /* System tick */
2051                     {
2052                         TCGv_ptr r_tickptr;
2053
2054                         r_tickptr = tcg_temp_new_ptr();
2055                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2056                                        offsetof(CPUState, stick));
2057                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2058                         tcg_temp_free_ptr(r_tickptr);
2059                         gen_movl_TN_reg(rd, cpu_dst);
2060                     }
2061                     break;
2062                 case 0x19: /* System tick compare */
2063                     gen_movl_TN_reg(rd, cpu_stick_cmpr);
2064                     break;
2065                 case 0x10: /* Performance Control */
2066                 case 0x11: /* Performance Instrumentation Counter */
2067                 case 0x12: /* Dispatch Control */
2068                 case 0x14: /* Softint set, WO */
2069                 case 0x15: /* Softint clear, WO */
2070 #endif
2071                 default:
2072                     goto illegal_insn;
2073                 }
2074 #if !defined(CONFIG_USER_ONLY)
2075             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
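                      /*
                       * On 32-bit SPARC this is the privileged rdpsr: the lazily
                       * evaluated condition codes are folded in first via
                       * gen_helper_compute_psr() and dc->cc_op switches to
                       * CC_OP_FLAGS.  On SPARC64 the same op3 encodes the UA2005
                       * hyperprivileged register reads and requires hypervisor
                       * mode.
                       */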
2076 #ifndef TARGET_SPARC64
2077                 if (!supervisor(dc))
2078                     goto priv_insn;
2079                 gen_helper_compute_psr();
2080                 dc->cc_op = CC_OP_FLAGS;
2081                 gen_helper_rdpsr(cpu_dst);
2082 #else
2083                 CHECK_IU_FEATURE(dc, HYPV);
2084                 if (!hypervisor(dc))
2085                     goto priv_insn;
2086                 rs1 = GET_FIELD(insn, 13, 17);
2087                 switch (rs1) {
2088                 case 0: // hpstate
2089                     // gen_op_rdhpstate();
2090                     break;
2091                 case 1: // htstate
2092                     // gen_op_rdhtstate();
2093                     break;
2094                 case 3: // hintp
2095                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2096                     break;
2097                 case 5: // htba
2098                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
2099                     break;
2100                 case 6: // hver
2101                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
2102                     break;
2103                 case 31: // hstick_cmpr
2104                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2105                     break;
2106                 default:
2107                     goto illegal_insn;
2108                 }
2109 #endif
2110                 gen_movl_TN_reg(rd, cpu_dst);
2111                 break;
2112             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
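                      /*
                       * Privileged register reads.  On SPARC64, rs1 selects the
                       * register: tpc/tnpc/tstate/tt are read through the
                       * per-trap-level trap_state pointer (tsptr), tick goes
                       * through the tick helper, and the window management
                       * registers (cansave, canrestore, cleanwin, otherwin,
                       * wstate) are plain loads from the CPU state.  On 32-bit
                       * SPARC this op3 simply reads %wim.
                       */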
2113                 if (!supervisor(dc))
2114                     goto priv_insn;
2115 #ifdef TARGET_SPARC64
2116                 rs1 = GET_FIELD(insn, 13, 17);
2117                 switch (rs1) {
2118                 case 0: // tpc
2119                     {
2120                         TCGv_ptr r_tsptr;
2121
2122                         r_tsptr = tcg_temp_new_ptr();
2123                         tcg_gen_ld_ptr(r_tsptr, cpu_env,
2124                                        offsetof(CPUState, tsptr));
2125                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2126                                       offsetof(trap_state, tpc));
2127                         tcg_temp_free_ptr(r_tsptr);
2128                     }
2129                     break;
2130                 case 1: // tnpc
2131                     {
2132                         TCGv_ptr r_tsptr;
2133
2134                         r_tsptr = tcg_temp_new_ptr();
2135                         tcg_gen_ld_ptr(r_tsptr, cpu_env,
2136                                        offsetof(CPUState, tsptr));
2137                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2138                                       offsetof(trap_state, tnpc));
2139                         tcg_temp_free_ptr(r_tsptr);
2140                     }
2141                     break;
2142                 case 2: // tstate
2143                     {
2144                         TCGv_ptr r_tsptr;
2145
2146                         r_tsptr = tcg_temp_new_ptr();
2147                         tcg_gen_ld_ptr(r_tsptr, cpu_env,
2148                                        offsetof(CPUState, tsptr));
2149                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2150                                       offsetof(trap_state, tstate));
2151                         tcg_temp_free_ptr(r_tsptr);
2152                     }
2153                     break;
2154                 case 3: // tt
2155                     {
2156                         TCGv_ptr r_tsptr;
2157
2158                         r_tsptr = tcg_temp_new_ptr();
2159                         tcg_gen_ld_ptr(r_tsptr, cpu_env,
2160                                        offsetof(CPUState, tsptr));
2161                         tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2162                                        offsetof(trap_state, tt));
2163                         tcg_temp_free_ptr(r_tsptr);
2164                         tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2165                     }
2166                     break;
2167                 case 4: // tick
2168                     {
2169                         TCGv_ptr r_tickptr;
2170
2171                         r_tickptr = tcg_temp_new_ptr();
2172                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2173                                        offsetof(CPUState, tick));
2174                         gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2175                         gen_movl_TN_reg(rd, cpu_tmp0);
2176                         tcg_temp_free_ptr(r_tickptr);
2177                     }
2178                     break;
2179                 case 5: // tba
2180                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2181                     break;
2182                 case 6: // pstate
2183                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2184                                    offsetof(CPUSPARCState, pstate));
2185                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2186                     break;
2187                 case 7: // tl
2188                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2189                                    offsetof(CPUSPARCState, tl));
2190                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2191                     break;
2192                 case 8: // pil
2193                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2194                                    offsetof(CPUSPARCState, psrpil));
2195                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2196                     break;
2197                 case 9: // cwp
2198                     gen_helper_rdcwp(cpu_tmp0);
2199                     break;
2200                 case 10: // cansave
2201                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2202                                    offsetof(CPUSPARCState, cansave));
2203                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2204                     break;
2205                 case 11: // canrestore
2206                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2207                                    offsetof(CPUSPARCState, canrestore));
2208                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2209                     break;
2210                 case 12: // cleanwin
2211                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2212                                    offsetof(CPUSPARCState, cleanwin));
2213                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2214                     break;
2215                 case 13: // otherwin
2216                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2217                                    offsetof(CPUSPARCState, otherwin));
2218                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2219                     break;
2220                 case 14: // wstate
2221                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2222                                    offsetof(CPUSPARCState, wstate));
2223                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2224                     break;
2225                 case 16: // UA2005 gl
2226                     CHECK_IU_FEATURE(dc, GL);
2227                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2228                                    offsetof(CPUSPARCState, gl));
2229                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2230                     break;
2231                 case 26: // UA2005 strand status
2232                     CHECK_IU_FEATURE(dc, HYPV);
2233                     if (!hypervisor(dc))
2234                         goto priv_insn;
2235                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2236                     break;
2237                 case 31: // ver
2238                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2239                     break;
2240                 case 15: // fq
2241                 default:
2242                     goto illegal_insn;
2243                 }
2244 #else
2245                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2246 #endif
2247                 gen_movl_TN_reg(rd, cpu_tmp0);
2248                 break;
2249             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2250 #ifdef TARGET_SPARC64
2251                 save_state(dc, cpu_cond);
2252                 gen_helper_flushw();
2253 #else
2254                 if (!supervisor(dc))
2255                     goto priv_insn;
2256                 gen_movl_TN_reg(rd, cpu_tbr);
2257 #endif
2258                 break;
2259 #endif
2260             } else if (xop == 0x34) {   /* FPU Operations (FPop1) */
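                      /*
                       * FPop1: FP arithmetic and conversions.  Single-precision
                       * values are operated on directly in cpu_fpr[]; double and
                       * quad operands are staged through the DT0/DT1 and QT0/QT1
                       * helper slots.  Each helper call is bracketed by
                       * gen_clear_float_exceptions() and
                       * gen_helper_check_ieee_exceptions() so the exception
                       * state accumulated by the helper can be checked.
                       */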
2261                 if (gen_trap_ifnofpu(dc, cpu_cond))
2262                     goto jmp_insn;
2263                 gen_op_clear_ieee_excp_and_FTT();
2264                 rs1 = GET_FIELD(insn, 13, 17);
2265                 rs2 = GET_FIELD(insn, 27, 31);
2266                 xop = GET_FIELD(insn, 18, 26);
2267                 switch (xop) {
2268                 case 0x1: /* fmovs */
2269                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2270                     break;
2271                 case 0x5: /* fnegs */
2272                     gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2273                     break;
2274                 case 0x9: /* fabss */
2275                     gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2276                     break;
2277                 case 0x29: /* fsqrts */
2278                     CHECK_FPU_FEATURE(dc, FSQRT);
2279                     gen_clear_float_exceptions();
2280                     gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2281                     gen_helper_check_ieee_exceptions();
2282                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2283                     break;
2284                 case 0x2a: /* fsqrtd */
2285                     CHECK_FPU_FEATURE(dc, FSQRT);
2286                     gen_op_load_fpr_DT1(DFPREG(rs2));
2287                     gen_clear_float_exceptions();
2288                     gen_helper_fsqrtd();
2289                     gen_helper_check_ieee_exceptions();
2290                     gen_op_store_DT0_fpr(DFPREG(rd));
2291                     break;
2292                 case 0x2b: /* fsqrtq */
2293                     CHECK_FPU_FEATURE(dc, FLOAT128);
2294                     gen_op_load_fpr_QT1(QFPREG(rs2));
2295                     gen_clear_float_exceptions();
2296                     gen_helper_fsqrtq();
2297                     gen_helper_check_ieee_exceptions();
2298                     gen_op_store_QT0_fpr(QFPREG(rd));
2299                     break;
2300                 case 0x41: /* fadds */
2301                     gen_clear_float_exceptions();
2302                     gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2303                     gen_helper_check_ieee_exceptions();
2304                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2305                     break;
2306                 case 0x42: /* faddd */
2307                     gen_op_load_fpr_DT0(DFPREG(rs1));
2308                     gen_op_load_fpr_DT1(DFPREG(rs2));
2309                     gen_clear_float_exceptions();
2310                     gen_helper_faddd();
2311                     gen_helper_check_ieee_exceptions();
2312                     gen_op_store_DT0_fpr(DFPREG(rd));
2313                     break;
2314                 case 0x43: /* faddq */
2315                     CHECK_FPU_FEATURE(dc, FLOAT128);
2316                     gen_op_load_fpr_QT0(QFPREG(rs1));
2317                     gen_op_load_fpr_QT1(QFPREG(rs2));
2318                     gen_clear_float_exceptions();
2319                     gen_helper_faddq();
2320                     gen_helper_check_ieee_exceptions();
2321                     gen_op_store_QT0_fpr(QFPREG(rd));
2322                     break;
2323                 case 0x45: /* fsubs */
2324                     gen_clear_float_exceptions();
2325                     gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2326                     gen_helper_check_ieee_exceptions();
2327                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2328                     break;
2329                 case 0x46: /* fsubd */
2330                     gen_op_load_fpr_DT0(DFPREG(rs1));
2331                     gen_op_load_fpr_DT1(DFPREG(rs2));
2332                     gen_clear_float_exceptions();
2333                     gen_helper_fsubd();
2334                     gen_helper_check_ieee_exceptions();
2335                     gen_op_store_DT0_fpr(DFPREG(rd));
2336                     break;
2337                 case 0x47: /* fsubq */
2338                     CHECK_FPU_FEATURE(dc, FLOAT128);
2339                     gen_op_load_fpr_QT0(QFPREG(rs1));
2340                     gen_op_load_fpr_QT1(QFPREG(rs2));
2341                     gen_clear_float_exceptions();
2342                     gen_helper_fsubq();
2343                     gen_helper_check_ieee_exceptions();
2344                     gen_op_store_QT0_fpr(QFPREG(rd));
2345                     break;
2346                 case 0x49: /* fmuls */
2347                     CHECK_FPU_FEATURE(dc, FMUL);
2348                     gen_clear_float_exceptions();
2349                     gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2350                     gen_helper_check_ieee_exceptions();
2351                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2352                     break;
2353                 case 0x4a: /* fmuld */
2354                     CHECK_FPU_FEATURE(dc, FMUL);
2355                     gen_op_load_fpr_DT0(DFPREG(rs1));
2356                     gen_op_load_fpr_DT1(DFPREG(rs2));
2357                     gen_clear_float_exceptions();
2358                     gen_helper_fmuld();
2359                     gen_helper_check_ieee_exceptions();
2360                     gen_op_store_DT0_fpr(DFPREG(rd));
2361                     break;
2362                 case 0x4b: /* fmulq */
2363                     CHECK_FPU_FEATURE(dc, FLOAT128);
2364                     CHECK_FPU_FEATURE(dc, FMUL);
2365                     gen_op_load_fpr_QT0(QFPREG(rs1));
2366                     gen_op_load_fpr_QT1(QFPREG(rs2));
2367                     gen_clear_float_exceptions();
2368                     gen_helper_fmulq();
2369                     gen_helper_check_ieee_exceptions();
2370                     gen_op_store_QT0_fpr(QFPREG(rd));
2371                     break;
2372                 case 0x4d: /* fdivs */
2373                     gen_clear_float_exceptions();
2374                     gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2375                     gen_helper_check_ieee_exceptions();
2376                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2377                     break;
2378                 case 0x4e: /* fdivd */
2379                     gen_op_load_fpr_DT0(DFPREG(rs1));
2380                     gen_op_load_fpr_DT1(DFPREG(rs2));
2381                     gen_clear_float_exceptions();
2382                     gen_helper_fdivd();
2383                     gen_helper_check_ieee_exceptions();
2384                     gen_op_store_DT0_fpr(DFPREG(rd));
2385                     break;
2386                 case 0x4f: /* fdivq */
2387                     CHECK_FPU_FEATURE(dc, FLOAT128);
2388                     gen_op_load_fpr_QT0(QFPREG(rs1));
2389                     gen_op_load_fpr_QT1(QFPREG(rs2));
2390                     gen_clear_float_exceptions();
2391                     gen_helper_fdivq();
2392                     gen_helper_check_ieee_exceptions();
2393                     gen_op_store_QT0_fpr(QFPREG(rd));
2394                     break;
2395                 case 0x69: /* fsmuld */
2396                     CHECK_FPU_FEATURE(dc, FSMULD);
2397                     gen_clear_float_exceptions();
2398                     gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2399                     gen_helper_check_ieee_exceptions();
2400                     gen_op_store_DT0_fpr(DFPREG(rd));
2401                     break;
2402                 case 0x6e: /* fdmulq */
2403                     CHECK_FPU_FEATURE(dc, FLOAT128);
2404                     gen_op_load_fpr_DT0(DFPREG(rs1));
2405                     gen_op_load_fpr_DT1(DFPREG(rs2));
2406                     gen_clear_float_exceptions();
2407                     gen_helper_fdmulq();
2408                     gen_helper_check_ieee_exceptions();
2409                     gen_op_store_QT0_fpr(QFPREG(rd));
2410                     break;
2411                 case 0xc4: /* fitos */
2412                     gen_clear_float_exceptions();
2413                     gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2414                     gen_helper_check_ieee_exceptions();
2415                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2416                     break;
2417                 case 0xc6: /* fdtos */
2418                     gen_op_load_fpr_DT1(DFPREG(rs2));
2419                     gen_clear_float_exceptions();
2420                     gen_helper_fdtos(cpu_tmp32);
2421                     gen_helper_check_ieee_exceptions();
2422                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2423                     break;
2424                 case 0xc7: /* fqtos */
2425                     CHECK_FPU_FEATURE(dc, FLOAT128);
2426                     gen_op_load_fpr_QT1(QFPREG(rs2));
2427                     gen_clear_float_exceptions();
2428                     gen_helper_fqtos(cpu_tmp32);
2429                     gen_helper_check_ieee_exceptions();
2430                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2431                     break;
2432                 case 0xc8: /* fitod */
2433                     gen_helper_fitod(cpu_fpr[rs2]);
2434                     gen_op_store_DT0_fpr(DFPREG(rd));
2435                     break;
2436                 case 0xc9: /* fstod */
2437                     gen_helper_fstod(cpu_fpr[rs2]);
2438                     gen_op_store_DT0_fpr(DFPREG(rd));
2439                     break;
2440                 case 0xcb: /* fqtod */
2441                     CHECK_FPU_FEATURE(dc, FLOAT128);
2442                     gen_op_load_fpr_QT1(QFPREG(rs2));
2443                     gen_clear_float_exceptions();
2444                     gen_helper_fqtod();
2445                     gen_helper_check_ieee_exceptions();
2446                     gen_op_store_DT0_fpr(DFPREG(rd));
2447                     break;
2448                 case 0xcc: /* fitoq */
2449                     CHECK_FPU_FEATURE(dc, FLOAT128);
2450                     gen_helper_fitoq(cpu_fpr[rs2]);
2451                     gen_op_store_QT0_fpr(QFPREG(rd));
2452                     break;
2453                 case 0xcd: /* fstoq */
2454                     CHECK_FPU_FEATURE(dc, FLOAT128);
2455                     gen_helper_fstoq(cpu_fpr[rs2]);
2456                     gen_op_store_QT0_fpr(QFPREG(rd));
2457                     break;
2458                 case 0xce: /* fdtoq */
2459                     CHECK_FPU_FEATURE(dc, FLOAT128);
2460                     gen_op_load_fpr_DT1(DFPREG(rs2));
2461                     gen_helper_fdtoq();
2462                     gen_op_store_QT0_fpr(QFPREG(rd));
2463                     break;
2464                 case 0xd1: /* fstoi */
2465                     gen_clear_float_exceptions();
2466                     gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2467                     gen_helper_check_ieee_exceptions();
2468                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2469                     break;
2470                 case 0xd2: /* fdtoi */
2471                     gen_op_load_fpr_DT1(DFPREG(rs2));
2472                     gen_clear_float_exceptions();
2473                     gen_helper_fdtoi(cpu_tmp32);
2474                     gen_helper_check_ieee_exceptions();
2475                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2476                     break;
2477                 case 0xd3: /* fqtoi */
2478                     CHECK_FPU_FEATURE(dc, FLOAT128);
2479                     gen_op_load_fpr_QT1(QFPREG(rs2));
2480                     gen_clear_float_exceptions();
2481                     gen_helper_fqtoi(cpu_tmp32);
2482                     gen_helper_check_ieee_exceptions();
2483                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2484                     break;
2485 #ifdef TARGET_SPARC64
2486                 case 0x2: /* V9 fmovd */
2487                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2488                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2489                                     cpu_fpr[DFPREG(rs2) + 1]);
2490                     break;
2491                 case 0x3: /* V9 fmovq */
2492                     CHECK_FPU_FEATURE(dc, FLOAT128);
2493                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2494                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2495                                     cpu_fpr[QFPREG(rs2) + 1]);
2496                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2497                                     cpu_fpr[QFPREG(rs2) + 2]);
2498                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2499                                     cpu_fpr[QFPREG(rs2) + 3]);
2500                     break;
2501                 case 0x6: /* V9 fnegd */
2502                     gen_op_load_fpr_DT1(DFPREG(rs2));
2503                     gen_helper_fnegd();
2504                     gen_op_store_DT0_fpr(DFPREG(rd));
2505                     break;
2506                 case 0x7: /* V9 fnegq */
2507                     CHECK_FPU_FEATURE(dc, FLOAT128);
2508                     gen_op_load_fpr_QT1(QFPREG(rs2));
2509                     gen_helper_fnegq();
2510                     gen_op_store_QT0_fpr(QFPREG(rd));
2511                     break;
2512                 case 0xa: /* V9 fabsd */
2513                     gen_op_load_fpr_DT1(DFPREG(rs2));
2514                     gen_helper_fabsd();
2515                     gen_op_store_DT0_fpr(DFPREG(rd));
2516                     break;
2517                 case 0xb: /* V9 fabsq */
2518                     CHECK_FPU_FEATURE(dc, FLOAT128);
2519                     gen_op_load_fpr_QT1(QFPREG(rs2));
2520                     gen_helper_fabsq();
2521                     gen_op_store_QT0_fpr(QFPREG(rd));
2522                     break;
2523                 case 0x81: /* V9 fstox */
2524                     gen_clear_float_exceptions();
2525                     gen_helper_fstox(cpu_fpr[rs2]);
2526                     gen_helper_check_ieee_exceptions();
2527                     gen_op_store_DT0_fpr(DFPREG(rd));
2528                     break;
2529                 case 0x82: /* V9 fdtox */
2530                     gen_op_load_fpr_DT1(DFPREG(rs2));
2531                     gen_clear_float_exceptions();
2532                     gen_helper_fdtox();
2533                     gen_helper_check_ieee_exceptions();
2534                     gen_op_store_DT0_fpr(DFPREG(rd));
2535                     break;
2536                 case 0x83: /* V9 fqtox */
2537                     CHECK_FPU_FEATURE(dc, FLOAT128);
2538                     gen_op_load_fpr_QT1(QFPREG(rs2));
2539                     gen_clear_float_exceptions();
2540                     gen_helper_fqtox();
2541                     gen_helper_check_ieee_exceptions();
2542                     gen_op_store_DT0_fpr(DFPREG(rd));
2543                     break;
2544                 case 0x84: /* V9 fxtos */
2545                     gen_op_load_fpr_DT1(DFPREG(rs2));
2546                     gen_clear_float_exceptions();
2547                     gen_helper_fxtos(cpu_tmp32);
2548                     gen_helper_check_ieee_exceptions();
2549                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2550                     break;
2551                 case 0x88: /* V9 fxtod */
2552                     gen_op_load_fpr_DT1(DFPREG(rs2));
2553                     gen_clear_float_exceptions();
2554                     gen_helper_fxtod();
2555                     gen_helper_check_ieee_exceptions();
2556                     gen_op_store_DT0_fpr(DFPREG(rd));
2557                     break;
2558                 case 0x8c: /* V9 fxtoq */
2559                     CHECK_FPU_FEATURE(dc, FLOAT128);
2560                     gen_op_load_fpr_DT1(DFPREG(rs2));
2561                     gen_clear_float_exceptions();
2562                     gen_helper_fxtoq();
2563                     gen_helper_check_ieee_exceptions();
2564                     gen_op_store_QT0_fpr(QFPREG(rd));
2565                     break;
2566 #endif
2567                 default:
2568                     goto illegal_insn;
2569                 }
2570             } else if (xop == 0x35) {   /* FPU Operations (FPop2) */
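                      /*
                       * FPop2: FP compares and (on V9) conditional moves.  FMOVR
                       * moves on an integer register condition, FMOVcc on
                       * %fccN/%icc/%xcc, and the compare ops write the condition
                       * code pair selected by rd & 3.
                       */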
2571 #ifdef TARGET_SPARC64
2572                 int cond;
2573 #endif
2574                 if (gen_trap_ifnofpu(dc, cpu_cond))
2575                     goto jmp_insn;
2576                 gen_op_clear_ieee_excp_and_FTT();
2577                 rs1 = GET_FIELD(insn, 13, 17);
2578                 rs2 = GET_FIELD(insn, 27, 31);
2579                 xop = GET_FIELD(insn, 18, 26);
2580 #ifdef TARGET_SPARC64
2581                 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2582                     int l1;
2583
2584                     l1 = gen_new_label();
2585                     cond = GET_FIELD_SP(insn, 14, 17);
2586                     cpu_src1 = get_src1(insn, cpu_src1);
2587                     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2588                                        0, l1);
2589                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2590                     gen_set_label(l1);
2591                     break;
2592                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2593                     int l1;
2594
2595                     l1 = gen_new_label();
2596                     cond = GET_FIELD_SP(insn, 14, 17);
2597                     cpu_src1 = get_src1(insn, cpu_src1);
2598                     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2599                                        0, l1);
2600                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2601                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2602                     gen_set_label(l1);
2603                     break;
2604                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2605                     int l1;
2606
2607                     CHECK_FPU_FEATURE(dc, FLOAT128);
2608                     l1 = gen_new_label();
2609                     cond = GET_FIELD_SP(insn, 14, 17);
2610                     cpu_src1 = get_src1(insn, cpu_src1);
2611                     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2612                                        0, l1);
2613                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2614                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2615                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2616                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2617                     gen_set_label(l1);
2618                     break;
2619                 }
2620 #endif
2621                 switch (xop) {
2622 #ifdef TARGET_SPARC64
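     /*
      * The FMOV*CC macros below implement conditional moves as a branch around
      * the copy: the condition is evaluated into a temporary, a branch skips
      * the move when it is zero, and otherwise the source FP registers (1, 2 or
      * 4 words for single/double/quad) are copied to rd.
      */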
2623 #define FMOVSCC(fcc)                                                    \
2624                     {                                                   \
2625                         TCGv r_cond;                                    \
2626                         int l1;                                         \
2627                                                                         \
2628                         l1 = gen_new_label();                           \
2629                         r_cond = tcg_temp_new();                        \
2630                         cond = GET_FIELD_SP(insn, 14, 17);              \
2631                         gen_fcond(r_cond, fcc, cond);                   \
2632                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2633                                            0, l1);                      \
2634                         tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
2635                         gen_set_label(l1);                              \
2636                         tcg_temp_free(r_cond);                          \
2637                     }
2638 #define FMOVDCC(fcc)                                                    \
2639                     {                                                   \
2640                         TCGv r_cond;                                    \
2641                         int l1;                                         \
2642                                                                         \
2643                         l1 = gen_new_label();                           \
2644                         r_cond = tcg_temp_new();                        \
2645                         cond = GET_FIELD_SP(insn, 14, 17);              \
2646                         gen_fcond(r_cond, fcc, cond);                   \
2647                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2648                                            0, l1);                      \
2649                         tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
2650                                         cpu_fpr[DFPREG(rs2)]);          \
2651                         tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
2652                                         cpu_fpr[DFPREG(rs2) + 1]);      \
2653                         gen_set_label(l1);                              \
2654                         tcg_temp_free(r_cond);                          \
2655                     }
2656 #define FMOVQCC(fcc)                                                    \
2657                     {                                                   \
2658                         TCGv r_cond;                                    \
2659                         int l1;                                         \
2660                                                                         \
2661                         l1 = gen_new_label();                           \
2662                         r_cond = tcg_temp_new();                        \
2663                         cond = GET_FIELD_SP(insn, 14, 17);              \
2664                         gen_fcond(r_cond, fcc, cond);                   \
2665                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2666                                            0, l1);                      \
2667                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
2668                                         cpu_fpr[QFPREG(rs2)]);          \
2669                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
2670                                         cpu_fpr[QFPREG(rs2) + 1]);      \
2671                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
2672                                         cpu_fpr[QFPREG(rs2) + 2]);      \
2673                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
2674                                         cpu_fpr[QFPREG(rs2) + 3]);      \
2675                         gen_set_label(l1);                              \
2676                         tcg_temp_free(r_cond);                          \
2677                     }
2678                     case 0x001: /* V9 fmovscc %fcc0 */
2679                         FMOVSCC(0);
2680                         break;
2681                     case 0x002: /* V9 fmovdcc %fcc0 */
2682                         FMOVDCC(0);
2683                         break;
2684                     case 0x003: /* V9 fmovqcc %fcc0 */
2685                         CHECK_FPU_FEATURE(dc, FLOAT128);
2686                         FMOVQCC(0);
2687                         break;
2688                     case 0x041: /* V9 fmovscc %fcc1 */
2689                         FMOVSCC(1);
2690                         break;
2691                     case 0x042: /* V9 fmovdcc %fcc1 */
2692                         FMOVDCC(1);
2693                         break;
2694                     case 0x043: /* V9 fmovqcc %fcc1 */
2695                         CHECK_FPU_FEATURE(dc, FLOAT128);
2696                         FMOVQCC(1);
2697                         break;
2698                     case 0x081: /* V9 fmovscc %fcc2 */
2699                         FMOVSCC(2);
2700                         break;
2701                     case 0x082: /* V9 fmovdcc %fcc2 */
2702                         FMOVDCC(2);
2703                         break;
2704                     case 0x083: /* V9 fmovqcc %fcc2 */
2705                         CHECK_FPU_FEATURE(dc, FLOAT128);
2706                         FMOVQCC(2);
2707                         break;
2708                     case 0x0c1: /* V9 fmovscc %fcc3 */
2709                         FMOVSCC(3);
2710                         break;
2711                     case 0x0c2: /* V9 fmovdcc %fcc3 */
2712                         FMOVDCC(3);
2713                         break;
2714                     case 0x0c3: /* V9 fmovqcc %fcc3 */
2715                         CHECK_FPU_FEATURE(dc, FLOAT128);
2716                         FMOVQCC(3);
2717                         break;
2718 #undef FMOVSCC
2719 #undef FMOVDCC
2720 #undef FMOVQCC
2721 #define FMOVSCC(icc)                                                    \
2722                     {                                                   \
2723                         TCGv r_cond;                                    \
2724                         int l1;                                         \
2725                                                                         \
2726                         l1 = gen_new_label();                           \
2727                         r_cond = tcg_temp_new();                        \
2728                         cond = GET_FIELD_SP(insn, 14, 17);              \
2729                         gen_cond(r_cond, icc, cond, dc);                \
2730                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2731                                            0, l1);                      \
2732                         tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
2733                         gen_set_label(l1);                              \
2734                         tcg_temp_free(r_cond);                          \
2735                     }
2736 #define FMOVDCC(icc)                                                    \
2737                     {                                                   \
2738                         TCGv r_cond;                                    \
2739                         int l1;                                         \
2740                                                                         \
2741                         l1 = gen_new_label();                           \
2742                         r_cond = tcg_temp_new();                        \
2743                         cond = GET_FIELD_SP(insn, 14, 17);              \
2744                         gen_cond(r_cond, icc, cond, dc);                \
2745                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2746                                            0, l1);                      \
2747                         tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
2748                                         cpu_fpr[DFPREG(rs2)]);          \
2749                         tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
2750                                         cpu_fpr[DFPREG(rs2) + 1]);      \
2751                         gen_set_label(l1);                              \
2752                         tcg_temp_free(r_cond);                          \
2753                     }
2754 #define FMOVQCC(icc)                                                    \
2755                     {                                                   \
2756                         TCGv r_cond;                                    \
2757                         int l1;                                         \
2758                                                                         \
2759                         l1 = gen_new_label();                           \
2760                         r_cond = tcg_temp_new();                        \
2761                         cond = GET_FIELD_SP(insn, 14, 17);              \
2762                         gen_cond(r_cond, icc, cond, dc);                \
2763                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2764                                            0, l1);                      \
2765                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
2766                                         cpu_fpr[QFPREG(rs2)]);          \
2767                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
2768                                         cpu_fpr[QFPREG(rs2) + 1]);      \
2769                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
2770                                         cpu_fpr[QFPREG(rs2) + 2]);      \
2771                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
2772                                         cpu_fpr[QFPREG(rs2) + 3]);      \
2773                         gen_set_label(l1);                              \
2774                         tcg_temp_free(r_cond);                          \
2775                     }
2776
2777                     case 0x101: /* V9 fmovscc %icc */
2778                         FMOVSCC(0);
2779                         break;
2780                     case 0x102: /* V9 fmovdcc %icc */
2781                         FMOVDCC(0);
                             break;
2782                     case 0x103: /* V9 fmovqcc %icc */
2783                         CHECK_FPU_FEATURE(dc, FLOAT128);
2784                         FMOVQCC(0);
2785                         break;
2786                     case 0x181: /* V9 fmovscc %xcc */
2787                         FMOVSCC(1);
2788                         break;
2789                     case 0x182: /* V9 fmovdcc %xcc */
2790                         FMOVDCC(1);
2791                         break;
2792                     case 0x183: /* V9 fmovqcc %xcc */
2793                         CHECK_FPU_FEATURE(dc, FLOAT128);
2794                         FMOVQCC(1);
2795                         break;
2796 #undef FMOVSCC
2797 #undef FMOVDCC
2798 #undef FMOVQCC
2799 #endif
2800                     case 0x51: /* fcmps, V9 %fcc */
2801                         gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2802                         break;
2803                     case 0x52: /* fcmpd, V9 %fcc */
2804                         gen_op_load_fpr_DT0(DFPREG(rs1));
2805                         gen_op_load_fpr_DT1(DFPREG(rs2));
2806                         gen_op_fcmpd(rd & 3);
2807                         break;
2808                     case 0x53: /* fcmpq, V9 %fcc */
2809                         CHECK_FPU_FEATURE(dc, FLOAT128);
2810                         gen_op_load_fpr_QT0(QFPREG(rs1));
2811                         gen_op_load_fpr_QT1(QFPREG(rs2));
2812                         gen_op_fcmpq(rd & 3);
2813                         break;
2814                     case 0x55: /* fcmpes, V9 %fcc */
2815                         gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2816                         break;
2817                     case 0x56: /* fcmped, V9 %fcc */
2818                         gen_op_load_fpr_DT0(DFPREG(rs1));
2819                         gen_op_load_fpr_DT1(DFPREG(rs2));
2820                         gen_op_fcmped(rd & 3);
2821                         break;
2822                     case 0x57: /* fcmpeq, V9 %fcc */
2823                         CHECK_FPU_FEATURE(dc, FLOAT128);
2824                         gen_op_load_fpr_QT0(QFPREG(rs1));
2825                         gen_op_load_fpr_QT1(QFPREG(rs2));
2826                         gen_op_fcmpeq(rd & 3);
2827                         break;
2828                     default:
2829                         goto illegal_insn;
2830                 }
2831             } else if (xop == 0x2) {
2832                 // clr/mov shortcut
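                      /*
                       * "or" with %g0 sources is the canonical clr/mov/set
                       * idiom, so the translator avoids emitting the OR:
                       * rs1 == 0 with an immediate is a constant load, rs1 == 0
                       * with a register is a register move, and a zero rs2 just
                       * copies rs1 into rd.
                       */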
2833
2834                 rs1 = GET_FIELD(insn, 13, 17);
2835                 if (rs1 == 0) {
2836                     // or %g0, x, y -> mov T0, x; mov y, T0
2837                     if (IS_IMM) {       /* immediate */
2838                         TCGv r_const;
2839
2840                         simm = GET_FIELDs(insn, 19, 31);
2841                         r_const = tcg_const_tl(simm);
2842                         gen_movl_TN_reg(rd, r_const);
2843                         tcg_temp_free(r_const);
2844                     } else {            /* register */
2845                         rs2 = GET_FIELD(insn, 27, 31);
2846                         gen_movl_reg_TN(rs2, cpu_dst);
2847                         gen_movl_TN_reg(rd, cpu_dst);
2848                     }
2849                 } else {
2850                     cpu_src1 = get_src1(insn, cpu_src1);
2851                     if (IS_IMM) {       /* immediate */
2852                         simm = GET_FIELDs(insn, 19, 31);
2853                         tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2854                         gen_movl_TN_reg(rd, cpu_dst);
2855                     } else {            /* register */
2856                         // or x, %g0, y -> mov T1, x; mov y, T1
2857                         rs2 = GET_FIELD(insn, 27, 31);
2858                         if (rs2 != 0) {
2859                             gen_movl_reg_TN(rs2, cpu_src2);
2860                             tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2861                             gen_movl_TN_reg(rd, cpu_dst);
2862                         } else
2863                             gen_movl_TN_reg(rd, cpu_src1);
2864                     }
2865                 }
2866 #ifdef TARGET_SPARC64
2867             } else if (xop == 0x25) { /* sll, V9 sllx */
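                      /*
                       * sll/sllx (and srl/srlx, sra/srax below): bit 12 of the
                       * instruction selects the 64-bit "x" form with a 6-bit
                       * shift count; the 32-bit forms mask the count to 5 bits
                       * and, for the right shifts, first restrict the source to
                       * its low 32 bits.
                       */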
2868                 cpu_src1 = get_src1(insn, cpu_src1);
2869                 if (IS_IMM) {   /* immediate */
2870                     simm = GET_FIELDs(insn, 20, 31);
2871                     if (insn & (1 << 12)) {
2872                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2873                     } else {
2874                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2875                     }
2876                 } else {                /* register */
2877                     rs2 = GET_FIELD(insn, 27, 31);
2878                     gen_movl_reg_TN(rs2, cpu_src2);
2879                     if (insn & (1 << 12)) {
2880                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2881                     } else {
2882                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2883                     }
2884                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2885                 }
2886                 gen_movl_TN_reg(rd, cpu_dst);
2887             } else if (xop == 0x26) { /* srl, V9 srlx */
2888                 cpu_src1 = get_src1(insn, cpu_src1);
2889                 if (IS_IMM) {   /* immediate */
2890                     simm = GET_FIELDs(insn, 20, 31);
2891                     if (insn & (1 << 12)) {
2892                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2893                     } else {
2894                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2895                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2896                     }
2897                 } else {                /* register */
2898                     rs2 = GET_FIELD(insn, 27, 31);
2899                     gen_movl_reg_TN(rs2, cpu_src2);
2900                     if (insn & (1 << 12)) {
2901                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2902                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2903                     } else {
2904                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2905                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2906                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2907                     }
2908                 }
2909                 gen_movl_TN_reg(rd, cpu_dst);
2910             } else if (xop == 0x27) { /* sra, V9 srax */
2911                 cpu_src1 = get_src1(insn, cpu_src1);
2912                 if (IS_IMM) {   /* immediate */
2913                     simm = GET_FIELDs(insn, 20, 31);
2914                     if (insn & (1 << 12)) {
2915                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
2916                     } else {
2917                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2918                         tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2919                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
2920                     }
2921                 } else {                /* register */
2922                     rs2 = GET_FIELD(insn, 27, 31);
2923                     gen_movl_reg_TN(rs2, cpu_src2);
2924                     if (insn & (1 << 12)) {
2925                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2926                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2927                     } else {
2928                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2929                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2930                         tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2931                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2932                     }
2933                 }
2934                 gen_movl_TN_reg(rd, cpu_dst);
2935 #endif
2936             } else if (xop < 0x36) {
2937                 if (xop < 0x20) {
2938                     cpu_src1 = get_src1(insn, cpu_src1);
2939                     cpu_src2 = get_src2(insn, cpu_src2);
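                          /*
                           * Basic ALU group: bit 4 of op3 (xop & 0x10) selects
                           * the flag-setting "cc" variant.  add/sub use
                           * dedicated cc helpers and record CC_OP_ADD/CC_OP_SUB,
                           * while the logical ops just copy the result to
                           * cpu_cc_dst and mark CC_OP_LOGIC for lazy flag
                           * computation.
                           */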
2940                     switch (xop & ~0x10) {
2941                     case 0x0: /* add */
2942                         if (IS_IMM) {
2943                             simm = GET_FIELDs(insn, 19, 31);
2944                             if (xop & 0x10) {
2945                                 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
2946                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2947                                 dc->cc_op = CC_OP_ADD;
2948                             } else {
2949                                 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
2950                             }
2951                         } else {
2952                             if (xop & 0x10) {
2953                                 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
2954                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2955                                 dc->cc_op = CC_OP_ADD;
2956                             } else {
2957                                 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2958                             }
2959                         }
2960                         break;
2961                     case 0x1: /* and */
2962                         if (IS_IMM) {
2963                             simm = GET_FIELDs(insn, 19, 31);
2964                             tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
2965                         } else {
2966                             tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
2967                         }
2968                         if (xop & 0x10) {
2969                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2970                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2971                             dc->cc_op = CC_OP_LOGIC;
2972                         }
2973                         break;
2974                     case 0x2: /* or */
2975                         if (IS_IMM) {
2976                             simm = GET_FIELDs(insn, 19, 31);
2977                             tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2978                         } else {
2979                             tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2980                         }
2981                         if (xop & 0x10) {
2982                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2983                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2984                             dc->cc_op = CC_OP_LOGIC;
2985                         }
2986                         break;
2987                     case 0x3: /* xor */
2988                         if (IS_IMM) {
2989                             simm = GET_FIELDs(insn, 19, 31);
2990                             tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
2991                         } else {
2992                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
2993                         }
2994                         if (xop & 0x10) {
2995                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2996                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2997                             dc->cc_op = CC_OP_LOGIC;
2998                         }
2999                         break;
3000                     case 0x4: /* sub */
3001                         if (IS_IMM) {
3002                             simm = GET_FIELDs(insn, 19, 31);
3003                             if (xop & 0x10) {
3004                                 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3005                             } else {
3006                                 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3007                             }
3008                         } else {
3009                             if (xop & 0x10) {
3010                                 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3011                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3012                                 dc->cc_op = CC_OP_SUB;
3013                             } else {
3014                                 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3015                             }
3016                         }
3017                         break;
3018                     case 0x5: /* andn */
3019                         if (IS_IMM) {
3020                             simm = GET_FIELDs(insn, 19, 31);
3021                             tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3022                         } else {
3023                             tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3024                         }
3025                         if (xop & 0x10) {
3026                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3027                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3028                             dc->cc_op = CC_OP_LOGIC;
3029                         }
3030                         break;
3031                     case 0x6: /* orn */
3032                         if (IS_IMM) {
3033                             simm = GET_FIELDs(insn, 19, 31);
3034                             tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3035                         } else {
3036                             tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3037                         }
3038                         if (xop & 0x10) {
3039                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3040                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3041                             dc->cc_op = CC_OP_LOGIC;
3042                         }
3043                         break;
3044                     case 0x7: /* xorn */
3045                         if (IS_IMM) {
3046                             simm = GET_FIELDs(insn, 19, 31);
3047                             tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3048                         } else {
3049                             tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3050                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3051                         }
3052                         if (xop & 0x10) {
3053                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3054                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3055                             dc->cc_op = CC_OP_LOGIC;
3056                         }
3057                         break;
3058                     case 0x8: /* addx, V9 addc */
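                             /* Add-with-carry needs the current carry bit, so
                                the lazily evaluated flags are forced into
                                cpu_psr via gen_helper_compute_psr before the
                                carry is read or the flag-setting helper runs. */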
3059                         if (IS_IMM) {
3060                             simm = GET_FIELDs(insn, 19, 31);
3061                             if (xop & 0x10) {
3062                                 gen_helper_compute_psr();
3063                                 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
3064                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
3065                                 dc->cc_op = CC_OP_ADDX;
3066                             } else {
3067                                 gen_helper_compute_psr();
3068                                 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3069                                 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3070                                 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3071                             }
3072                         } else {
3073                             if (xop & 0x10) {
3074                                 gen_helper_compute_psr();
3075                                 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3076                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
3077                                 dc->cc_op = CC_OP_ADDX;
3078                             } else {
3079                                 gen_helper_compute_psr();
3080                                 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3081                                 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3082                                 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3083                             }
3084                         }
3085                         break;
3086 #ifdef TARGET_SPARC64
3087                     case 0x9: /* V9 mulx */
3088                         if (IS_IMM) {
3089                             simm = GET_FIELDs(insn, 19, 31);
3090                             tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3091                         } else {
3092                             tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3093                         }
3094                         break;
3095 #endif
3096                     case 0xa: /* umul */
3097                         CHECK_IU_FEATURE(dc, MUL);
3098                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3099                         if (xop & 0x10) {
3100                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3101                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3102                             dc->cc_op = CC_OP_LOGIC;
3103                         }
3104                         break;
3105                     case 0xb: /* smul */
3106                         CHECK_IU_FEATURE(dc, MUL);
3107                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3108                         if (xop & 0x10) {
3109                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3110                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3111                             dc->cc_op = CC_OP_LOGIC;
3112                         }
3113                         break;
3114                     case 0xc: /* subx, V9 subc */
3115                         if (IS_IMM) {
3116                             simm = GET_FIELDs(insn, 19, 31);
3117                             if (xop & 0x10) {
3118                                 gen_helper_compute_psr();
3119                                 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3120                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3121                                 dc->cc_op = CC_OP_SUBX;
3122                             } else {
3123                                 gen_helper_compute_psr();
3124                                 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3125                                 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3126                                 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3127                             }
3128                         } else {
3129                             if (xop & 0x10) {
3130                                 gen_helper_compute_psr();
3131                                 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3132                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3133                                 dc->cc_op = CC_OP_SUBX;
3134                             } else {
3135                                 gen_helper_compute_psr();
3136                                 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3137                                 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3138                                 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3139                             }
3140                         }
3141                         break;
3142 #ifdef TARGET_SPARC64
3143                     case 0xd: /* V9 udivx */
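                             /* 64-bit unsigned divide; a zero divisor is
                                trapped before the divide executes. */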
3144                         tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3145                         tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3146                         gen_trap_ifdivzero_tl(cpu_cc_src2);
3147                         tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3148                         break;
3149 #endif
3150                     case 0xe: /* udiv */
3151                         CHECK_IU_FEATURE(dc, DIV);
3152                         gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3153                         if (xop & 0x10) {
3154                             gen_op_div_cc(cpu_dst);
3155                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3156                             dc->cc_op = CC_OP_FLAGS;
3157                         }
3158                         break;
3159                     case 0xf: /* sdiv */
3160                         CHECK_IU_FEATURE(dc, DIV);
3161                         gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3162                         if (xop & 0x10) {
3163                             gen_op_div_cc(cpu_dst);
3164                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3165                             dc->cc_op = CC_OP_FLAGS;
3166                         }
3167                         break;
3168                     default:
3169                         goto illegal_insn;
3170                     }
3171                     gen_movl_TN_reg(rd, cpu_dst);
3172                 } else {
3173                     cpu_src1 = get_src1(insn, cpu_src1);
3174                     cpu_src2 = get_src2(insn, cpu_src2);
3175                     switch (xop) {
3176                     case 0x20: /* taddcc */
3177                         gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3178                         gen_movl_TN_reg(rd, cpu_dst);
3179                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3180                         dc->cc_op = CC_OP_TADD;
3181                         break;
3182                     case 0x21: /* tsubcc */
3183                         gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3184                         gen_movl_TN_reg(rd, cpu_dst);
3185                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3186                         dc->cc_op = CC_OP_TSUB;
3187                         break;
3188                     case 0x22: /* taddcctv */
3189                         save_state(dc, cpu_cond);
3190                         gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3191                         gen_movl_TN_reg(rd, cpu_dst);
3192                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3193                         dc->cc_op = CC_OP_TADDTV;
3194                         break;
3195                     case 0x23: /* tsubcctv */
3196                         save_state(dc, cpu_cond);
3197                         gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3198                         gen_movl_TN_reg(rd, cpu_dst);
3199                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3200                         dc->cc_op = CC_OP_TSUBTV;
3201                         break;
3202                     case 0x24: /* mulscc */
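                             /* mulscc is one step of the V8 iterative multiply:
                                it consumes the current N and V flags (and the Y
                                register), so the lazy flags are materialized
                                first, and it leaves them fully computed
                                (CC_OP_FLAGS). */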
3203                         gen_helper_compute_psr();
3204                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3205                         gen_movl_TN_reg(rd, cpu_dst);
3206                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3207                         dc->cc_op = CC_OP_FLAGS;
3208                         break;
3209 #ifndef TARGET_SPARC64
3210                     case 0x25:  /* sll */
3211                         if (IS_IMM) { /* immediate */
3212                             simm = GET_FIELDs(insn, 20, 31);
3213                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3214                         } else { /* register */
3215                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3216                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3217                         }
3218                         gen_movl_TN_reg(rd, cpu_dst);
3219                         break;
3220                     case 0x26:  /* srl */
3221                         if (IS_IMM) { /* immediate */
3222                             simm = GET_FIELDs(insn, 20, 31);
3223                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3224                         } else { /* register */
3225                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3226                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3227                         }
3228                         gen_movl_TN_reg(rd, cpu_dst);
3229                         break;
3230                     case 0x27:  /* sra */
3231                         if (IS_IMM) { /* immediate */
3232                             simm = GET_FIELDs(insn, 20, 31);
3233                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3234                         } else { /* register */
3235                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3236                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3237                         }
3238                         gen_movl_TN_reg(rd, cpu_dst);
3239                         break;
3240 #endif
3241                     case 0x30:
3242                         {
3243                             switch(rd) {
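                                 /* Per the SPARC manuals, WRY/WRASR write
                                    r[rs1] xor (r[rs2] or sign_ext(simm13)),
                                    hence the xor in each case below. */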
3244                             case 0: /* wry */
3245                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3246                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3247                                 break;
3248 #ifndef TARGET_SPARC64
3249                             case 0x01 ... 0x0f: /* undefined in the
3250                                                    SPARCv8 manual, nop
3251                                                    on the microSPARC
3252                                                    II */
3253                             case 0x10 ... 0x1f: /* implementation-dependent
3254                                                    in the SPARCv8
3255                                                    manual, nop on the
3256                                                    microSPARC II */
3257                                 break;
3258 #else
3259                             case 0x2: /* V9 wrccr */
3260                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3261                                 gen_helper_wrccr(cpu_dst);
3262                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3263                                 dc->cc_op = CC_OP_FLAGS;
3264                                 break;
3265                             case 0x3: /* V9 wrasi */
3266                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3267                                 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3268                                 break;
3269                             case 0x6: /* V9 wrfprs */
3270                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3271                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3272                                 save_state(dc, cpu_cond);
3273                                 gen_op_next_insn();
3274                                 tcg_gen_exit_tb(0);
3275                                 dc->is_br = 1;
3276                                 break;
3277                             case 0xf: /* V9 sir, nop if user */
3278 #if !defined(CONFIG_USER_ONLY)
3279                                 if (supervisor(dc))
3280                                     ; // XXX
3281 #endif
3282                                 break;
3283                             case 0x13: /* Graphics Status */
3284                                 if (gen_trap_ifnofpu(dc, cpu_cond))
3285                                     goto jmp_insn;
3286                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3287                                 break;
3288                             case 0x14: /* Softint set */
3289                                 if (!supervisor(dc))
3290                                     goto illegal_insn;
3291                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3292                                 gen_helper_set_softint(cpu_tmp64);
3293                                 break;
3294                             case 0x15: /* Softint clear */
3295                                 if (!supervisor(dc))
3296                                     goto illegal_insn;
3297                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3298                                 gen_helper_clear_softint(cpu_tmp64);
3299                                 break;
3300                             case 0x16: /* Softint write */
3301                                 if (!supervisor(dc))
3302                                     goto illegal_insn;
3303                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3304                                 gen_helper_write_softint(cpu_tmp64);
3305                                 break;
3306                             case 0x17: /* Tick compare */
3307 #if !defined(CONFIG_USER_ONLY)
3308                                 if (!supervisor(dc))
3309                                     goto illegal_insn;
3310 #endif
3311                                 {
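                                         /* Writing the tick compare value
                                            also re-arms the timer; the
                                            helper takes a pointer to the
                                            timer state loaded from
                                            env->tick. */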
3312                                     TCGv_ptr r_tickptr;
3313
3314                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3315                                                    cpu_src2);
3316                                     r_tickptr = tcg_temp_new_ptr();
3317                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3318                                                    offsetof(CPUState, tick));
3319                                     gen_helper_tick_set_limit(r_tickptr,
3320                                                               cpu_tick_cmpr);
3321                                     tcg_temp_free_ptr(r_tickptr);
3322                                 }
3323                                 break;
3324                             case 0x18: /* System tick */
3325 #if !defined(CONFIG_USER_ONLY)
3326                                 if (!supervisor(dc))
3327                                     goto illegal_insn;
3328 #endif
3329                                 {
3330                                     TCGv_ptr r_tickptr;
3331
3332                                     tcg_gen_xor_tl(cpu_dst, cpu_src1,
3333                                                    cpu_src2);
3334                                     r_tickptr = tcg_temp_new_ptr();
3335                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3336                                                    offsetof(CPUState, stick));
3337                                     gen_helper_tick_set_count(r_tickptr,
3338                                                               cpu_dst);
3339                                     tcg_temp_free_ptr(r_tickptr);
3340                                 }
3341                                 break;
3342                             case 0x19: /* System tick compare */
3343 #if !defined(CONFIG_USER_ONLY)
3344                                 if (!supervisor(dc))
3345                                     goto illegal_insn;
3346 #endif
3347                                 {
3348                                     TCGv_ptr r_tickptr;
3349
3350                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3351                                                    cpu_src2);
3352                                     r_tickptr = tcg_temp_new_ptr();
3353                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3354                                                    offsetof(CPUState, stick));
3355                                     gen_helper_tick_set_limit(r_tickptr,
3356                                                               cpu_stick_cmpr);
3357                                     tcg_temp_free_ptr(r_tickptr);
3358                                 }
3359                                 break;
3360
3361                             case 0x10: /* Performance Control */
3362                             case 0x11: /* Performance Instrumentation
3363                                           Counter */
3364                             case 0x12: /* Dispatch Control */
3365 #endif
3366                             default:
3367                                 goto illegal_insn;
3368                             }
3369                         }
3370                         break;
3371 #if !defined(CONFIG_USER_ONLY)
3372                     case 0x31: /* wrpsr, V9 saved, restored */
3373                         {
3374                             if (!supervisor(dc))
3375                                 goto priv_insn;
3376 #ifdef TARGET_SPARC64
3377                             switch (rd) {
3378                             case 0:
3379                                 gen_helper_saved();
3380                                 break;
3381                             case 1:
3382                                 gen_helper_restored();
3383                                 break;
3384                             case 2: /* UA2005 allclean */
3385                             case 3: /* UA2005 otherw */
3386                             case 4: /* UA2005 normalw */
3387                             case 5: /* UA2005 invalw */
3388                                 // XXX
3389                             default:
3390                                 goto illegal_insn;
3391                             }
3392 #else
3393                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3394                             gen_helper_wrpsr(cpu_dst);
3395                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3396                             dc->cc_op = CC_OP_FLAGS;
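                                 /* A PSR write can change state cached by the
                                    translator (e.g. PSR.EF, the supervisor
                                    bit), so end the TB and resume at the next
                                    instruction. */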
3397                             save_state(dc, cpu_cond);
3398                             gen_op_next_insn();
3399                             tcg_gen_exit_tb(0);
3400                             dc->is_br = 1;
3401 #endif
3402                         }
3403                         break;
3404                     case 0x32: /* wrwim, V9 wrpr */
3405                         {
3406                             if (!supervisor(dc))
3407                                 goto priv_insn;
3408                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3409 #ifdef TARGET_SPARC64
3410                             switch (rd) {
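                                 /* tpc/tnpc/tstate/tt live in the trap_state
                                    of the current trap level, reached through
                                    the env->tsptr pointer. */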
3411                             case 0: // tpc
3412                                 {
3413                                     TCGv_ptr r_tsptr;
3414
3415                                     r_tsptr = tcg_temp_new_ptr();
3416                                     tcg_gen_ld_ptr(r_tsptr, cpu_env,
3417                                                    offsetof(CPUState, tsptr));
3418                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3419                                                   offsetof(trap_state, tpc));
3420                                     tcg_temp_free_ptr(r_tsptr);
3421                                 }
3422                                 break;
3423                             case 1: // tnpc
3424                                 {
3425                                     TCGv_ptr r_tsptr;
3426
3427                                     r_tsptr = tcg_temp_new_ptr();
3428                                     tcg_gen_ld_ptr(r_tsptr, cpu_env,
3429                                                    offsetof(CPUState, tsptr));
3430                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3431                                                   offsetof(trap_state, tnpc));
3432                                     tcg_temp_free_ptr(r_tsptr);
3433                                 }
3434                                 break;
3435                             case 2: // tstate
3436                                 {
3437                                     TCGv_ptr r_tsptr;
3438
3439                                     r_tsptr = tcg_temp_new_ptr();
3440                                     tcg_gen_ld_ptr(r_tsptr, cpu_env,
3441                                                    offsetof(CPUState, tsptr));
3442                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3443                                                   offsetof(trap_state,
3444                                                            tstate));
3445                                     tcg_temp_free_ptr(r_tsptr);
3446                                 }
3447                                 break;
3448                             case 3: // tt
3449                                 {
3450                                     TCGv_ptr r_tsptr;
3451
3452                                     r_tsptr = tcg_temp_new_ptr();
3453                                     tcg_gen_ld_ptr(r_tsptr, cpu_env,
3454                                                    offsetof(CPUState, tsptr));
3455                                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3456                                     tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3457                                                    offsetof(trap_state, tt));
3458                                     tcg_temp_free_ptr(r_tsptr);
3459                                 }
3460                                 break;
3461                             case 4: // tick
3462                                 {
3463                                     TCGv_ptr r_tickptr;
3464
3465                                     r_tickptr = tcg_temp_new_ptr();
3466                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3467                                                    offsetof(CPUState, tick));
3468                                     gen_helper_tick_set_count(r_tickptr,
3469                                                               cpu_tmp0);
3470                                     tcg_temp_free_ptr(r_tickptr);
3471                                 }
3472                                 break;
3473                             case 5: // tba
3474                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3475                                 break;
3476                             case 6: // pstate
3477                                 save_state(dc, cpu_cond);
3478                                 gen_helper_wrpstate(cpu_tmp0);
3479                                 gen_op_next_insn();
3480                                 tcg_gen_exit_tb(0);
3481                                 dc->is_br = 1;
3482                                 break;
3483                             case 7: // tl
3484                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3485                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3486                                                offsetof(CPUSPARCState, tl));
3487                                 break;
3488                             case 8: // pil
3489                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3490                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3491                                                offsetof(CPUSPARCState,
3492                                                         psrpil));
3493                                 break;
3494                             case 9: // cwp
3495                                 gen_helper_wrcwp(cpu_tmp0);
3496                                 break;
3497                             case 10: // cansave
3498                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3499                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3500                                                offsetof(CPUSPARCState,
3501                                                         cansave));
3502                                 break;
3503                             case 11: // canrestore
3504                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3505                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3506                                                offsetof(CPUSPARCState,
3507                                                         canrestore));
3508                                 break;
3509                             case 12: // cleanwin
3510                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3511                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3512                                                offsetof(CPUSPARCState,
3513                                                         cleanwin));
3514                                 break;
3515                             case 13: // otherwin
3516                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3517                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3518                                                offsetof(CPUSPARCState,
3519                                                         otherwin));
3520                                 break;
3521                             case 14: // wstate
3522                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3523                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3524                                                offsetof(CPUSPARCState,
3525                                                         wstate));
3526                                 break;
3527                             case 16: // UA2005 gl
3528                                 CHECK_IU_FEATURE(dc, GL);
3529                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3530                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3531                                                offsetof(CPUSPARCState, gl));
3532                                 break;
3533                             case 26: // UA2005 strand status
3534                                 CHECK_IU_FEATURE(dc, HYPV);
3535                                 if (!hypervisor(dc))
3536                                     goto priv_insn;
3537                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3538                                 break;
3539                             default:
3540                                 goto illegal_insn;
3541                             }
3542 #else
3543                             tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3544                             if (dc->def->nwindows != 32)
3545                                 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3546                                                 (1 << dc->def->nwindows) - 1);
3547                             tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3548 #endif
3549                         }
3550                         break;
3551                     case 0x33: /* wrtbr, UA2005 wrhpr */
3552                         {
3553 #ifndef TARGET_SPARC64
3554                             if (!supervisor(dc))
3555                                 goto priv_insn;
3556                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3557 #else
3558                             CHECK_IU_FEATURE(dc, HYPV);
3559                             if (!hypervisor(dc))
3560                                 goto priv_insn;
3561                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3562                             switch (rd) {
3563                             case 0: // hpstate
3564                                 // XXX gen_op_wrhpstate();
3565                                 save_state(dc, cpu_cond);
3566                                 gen_op_next_insn();
3567                                 tcg_gen_exit_tb(0);
3568                                 dc->is_br = 1;
3569                                 break;
3570                             case 1: // htstate
3571                                 // XXX gen_op_wrhtstate();
3572                                 break;
3573                             case 3: // hintp
3574                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3575                                 break;
3576                             case 5: // htba
3577                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3578                                 break;
3579                             case 31: // hstick_cmpr
3580                                 {
3581                                     TCGv_ptr r_tickptr;
3582
3583                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3584                                     r_tickptr = tcg_temp_new_ptr();
3585                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3586                                                    offsetof(CPUState, hstick));
3587                                     gen_helper_tick_set_limit(r_tickptr,
3588                                                               cpu_hstick_cmpr);
3589                                     tcg_temp_free_ptr(r_tickptr);
3590                                 }
3591                                 break;
3592                             case 6: // hver readonly
3593                             default:
3594                                 goto illegal_insn;
3595                             }
3596 #endif
3597                         }
3598                         break;
3599 #endif
3600 #ifdef TARGET_SPARC64
3601                     case 0x2c: /* V9 movcc */
3602                         {
3603                             int cc = GET_FIELD_SP(insn, 11, 12);
3604                             int cond = GET_FIELD_SP(insn, 14, 17);
3605                             TCGv r_cond;
3606                             int l1;
3607
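                                 /* Evaluate the selected condition into r_cond,
                                    then branch over the register write so rd is
                                    only updated when the condition holds; bit 18
                                    picks integer (icc/xcc) vs. floating-point
                                    condition codes. */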
3608                             r_cond = tcg_temp_new();
3609                             if (insn & (1 << 18)) {
3610                                 if (cc == 0)
3611                                     gen_cond(r_cond, 0, cond, dc);
3612                                 else if (cc == 2)
3613                                     gen_cond(r_cond, 1, cond, dc);
3614                                 else
3615                                     goto illegal_insn;
3616                             } else {
3617                                 gen_fcond(r_cond, cc, cond);
3618                             }
3619
3620                             l1 = gen_new_label();
3621
3622                             tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3623                             if (IS_IMM) {       /* immediate */
3624                                 TCGv r_const;
3625
3626                                 simm = GET_FIELD_SPs(insn, 0, 10);
3627                                 r_const = tcg_const_tl(simm);
3628                                 gen_movl_TN_reg(rd, r_const);
3629                                 tcg_temp_free(r_const);
3630                             } else {
3631                                 rs2 = GET_FIELD_SP(insn, 0, 4);
3632                                 gen_movl_reg_TN(rs2, cpu_tmp0);
3633                                 gen_movl_TN_reg(rd, cpu_tmp0);
3634                             }
3635                             gen_set_label(l1);
3636                             tcg_temp_free(r_cond);
3637                             break;
3638                         }
3639                     case 0x2d: /* V9 sdivx */
3640                         gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3641                         gen_movl_TN_reg(rd, cpu_dst);
3642                         break;
3643                     case 0x2e: /* V9 popc */
3644                         {
3645                             cpu_src2 = get_src2(insn, cpu_src2);
3646                             gen_helper_popc(cpu_dst, cpu_src2);
3647                             gen_movl_TN_reg(rd, cpu_dst);
                                 break;
3648                         }
3649                     case 0x2f: /* V9 movr */
3650                         {
3651                             int cond = GET_FIELD_SP(insn, 10, 12);
3652                             int l1;
3653
3654                             cpu_src1 = get_src1(insn, cpu_src1);
3655
3656                             l1 = gen_new_label();
3657
3658                             tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3659                                               cpu_src1, 0, l1);
3660                             if (IS_IMM) {       /* immediate */
3661                                 TCGv r_const;
3662
3663                                 simm = GET_FIELD_SPs(insn, 0, 9);
3664                                 r_const = tcg_const_tl(simm);
3665                                 gen_movl_TN_reg(rd, r_const);
3666                                 tcg_temp_free(r_const);
3667                             } else {
3668                                 rs2 = GET_FIELD_SP(insn, 0, 4);
3669                                 gen_movl_reg_TN(rs2, cpu_tmp0);
3670                                 gen_movl_TN_reg(rd, cpu_tmp0);
3671                             }
3672                             gen_set_label(l1);
3673                             break;
3674                         }
3675 #endif
3676                     default:
3677                         goto illegal_insn;
3678                     }
3679                 }
3680             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3681 #ifdef TARGET_SPARC64
3682                 int opf = GET_FIELD_SP(insn, 5, 13);
3683                 rs1 = GET_FIELD(insn, 13, 17);
3684                 rs2 = GET_FIELD(insn, 27, 31);
3685                 if (gen_trap_ifnofpu(dc, cpu_cond))
3686                     goto jmp_insn;
3687
3688                 switch (opf) {
3689                 case 0x000: /* VIS I edge8cc */
3690                 case 0x001: /* VIS II edge8n */
3691                 case 0x002: /* VIS I edge8lcc */
3692                 case 0x003: /* VIS II edge8ln */
3693                 case 0x004: /* VIS I edge16cc */
3694                 case 0x005: /* VIS II edge16n */
3695                 case 0x006: /* VIS I edge16lcc */
3696                 case 0x007: /* VIS II edge16ln */
3697                 case 0x008: /* VIS I edge32cc */
3698                 case 0x009: /* VIS II edge32n */
3699                 case 0x00a: /* VIS I edge32lcc */
3700                 case 0x00b: /* VIS II edge32ln */
3701                     // XXX
3702                     goto illegal_insn;
3703                 case 0x010: /* VIS I array8 */
3704                     CHECK_FPU_FEATURE(dc, VIS1);
3705                     cpu_src1 = get_src1(insn, cpu_src1);
3706                     gen_movl_reg_TN(rs2, cpu_src2);
3707                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3708                     gen_movl_TN_reg(rd, cpu_dst);
3709                     break;
3710                 case 0x012: /* VIS I array16 */
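                         /* array16 and array32 reuse the array8 address
                            computation and simply scale the result by the
                            element size. */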
3711                     CHECK_FPU_FEATURE(dc, VIS1);
3712                     cpu_src1 = get_src1(insn, cpu_src1);
3713                     gen_movl_reg_TN(rs2, cpu_src2);
3714                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3715                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3716                     gen_movl_TN_reg(rd, cpu_dst);
3717                     break;
3718                 case 0x014: /* VIS I array32 */
3719                     CHECK_FPU_FEATURE(dc, VIS1);
3720                     cpu_src1 = get_src1(insn, cpu_src1);
3721                     gen_movl_reg_TN(rs2, cpu_src2);
3722                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3723                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3724                     gen_movl_TN_reg(rd, cpu_dst);
3725                     break;
3726                 case 0x018: /* VIS I alignaddr */
3727                     CHECK_FPU_FEATURE(dc, VIS1);
3728                     cpu_src1 = get_src1(insn, cpu_src1);
3729                     gen_movl_reg_TN(rs2, cpu_src2);
3730                     gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3731                     gen_movl_TN_reg(rd, cpu_dst);
3732                     break;
3733                 case 0x019: /* VIS II bmask */
3734                 case 0x01a: /* VIS I alignaddrl */
3735                     // XXX
3736                     goto illegal_insn;
3737                 case 0x020: /* VIS I fcmple16 */
3738                     CHECK_FPU_FEATURE(dc, VIS1);
3739                     gen_op_load_fpr_DT0(DFPREG(rs1));
3740                     gen_op_load_fpr_DT1(DFPREG(rs2));
3741                     gen_helper_fcmple16();
3742                     gen_op_store_DT0_fpr(DFPREG(rd));
3743                     break;
3744                 case 0x022: /* VIS I fcmpne16 */
3745                     CHECK_FPU_FEATURE(dc, VIS1);
3746                     gen_op_load_fpr_DT0(DFPREG(rs1));
3747                     gen_op_load_fpr_DT1(DFPREG(rs2));
3748                     gen_helper_fcmpne16();
3749                     gen_op_store_DT0_fpr(DFPREG(rd));
3750                     break;
3751                 case 0x024: /* VIS I fcmple32 */
3752                     CHECK_FPU_FEATURE(dc, VIS1);
3753                     gen_op_load_fpr_DT0(DFPREG(rs1));
3754                     gen_op_load_fpr_DT1(DFPREG(rs2));
3755                     gen_helper_fcmple32();
3756                     gen_op_store_DT0_fpr(DFPREG(rd));
3757                     break;
3758                 case 0x026: /* VIS I fcmpne32 */
3759                     CHECK_FPU_FEATURE(dc, VIS1);
3760                     gen_op_load_fpr_DT0(DFPREG(rs1));
3761                     gen_op_load_fpr_DT1(DFPREG(rs2));
3762                     gen_helper_fcmpne32();
3763                     gen_op_store_DT0_fpr(DFPREG(rd));
3764                     break;
3765                 case 0x028: /* VIS I fcmpgt16 */
3766                     CHECK_FPU_FEATURE(dc, VIS1);
3767                     gen_op_load_fpr_DT0(DFPREG(rs1));
3768                     gen_op_load_fpr_DT1(DFPREG(rs2));
3769                     gen_helper_fcmpgt16();
3770                     gen_op_store_DT0_fpr(DFPREG(rd));
3771                     break;
3772                 case 0x02a: /* VIS I fcmpeq16 */
3773                     CHECK_FPU_FEATURE(dc, VIS1);
3774                     gen_op_load_fpr_DT0(DFPREG(rs1));
3775                     gen_op_load_fpr_DT1(DFPREG(rs2));
3776                     gen_helper_fcmpeq16();
3777                     gen_op_store_DT0_fpr(DFPREG(rd));
3778                     break;
3779                 case 0x02c: /* VIS I fcmpgt32 */
3780                     CHECK_FPU_FEATURE(dc, VIS1);
3781                     gen_op_load_fpr_DT0(DFPREG(rs1));
3782                     gen_op_load_fpr_DT1(DFPREG(rs2));
3783                     gen_helper_fcmpgt32();
3784                     gen_op_store_DT0_fpr(DFPREG(rd));
3785                     break;
3786                 case 0x02e: /* VIS I fcmpeq32 */
3787                     CHECK_FPU_FEATURE(dc, VIS1);
3788                     gen_op_load_fpr_DT0(DFPREG(rs1));
3789                     gen_op_load_fpr_DT1(DFPREG(rs2));
3790                     gen_helper_fcmpeq32();
3791                     gen_op_store_DT0_fpr(DFPREG(rd));
3792                     break;
3793                 case 0x031: /* VIS I fmul8x16 */
3794                     CHECK_FPU_FEATURE(dc, VIS1);
3795                     gen_op_load_fpr_DT0(DFPREG(rs1));
3796                     gen_op_load_fpr_DT1(DFPREG(rs2));
3797                     gen_helper_fmul8x16();
3798                     gen_op_store_DT0_fpr(DFPREG(rd));
3799                     break;
3800                 case 0x033: /* VIS I fmul8x16au */
3801                     CHECK_FPU_FEATURE(dc, VIS1);
3802                     gen_op_load_fpr_DT0(DFPREG(rs1));
3803                     gen_op_load_fpr_DT1(DFPREG(rs2));
3804                     gen_helper_fmul8x16au();
3805                     gen_op_store_DT0_fpr(DFPREG(rd));
3806                     break;
3807                 case 0x035: /* VIS I fmul8x16al */
3808                     CHECK_FPU_FEATURE(dc, VIS1);
3809                     gen_op_load_fpr_DT0(DFPREG(rs1));
3810                     gen_op_load_fpr_DT1(DFPREG(rs2));
3811                     gen_helper_fmul8x16al();
3812                     gen_op_store_DT0_fpr(DFPREG(rd));
3813                     break;
3814                 case 0x036: /* VIS I fmul8sux16 */
3815                     CHECK_FPU_FEATURE(dc, VIS1);
3816                     gen_op_load_fpr_DT0(DFPREG(rs1));
3817                     gen_op_load_fpr_DT1(DFPREG(rs2));
3818                     gen_helper_fmul8sux16();
3819                     gen_op_store_DT0_fpr(DFPREG(rd));
3820                     break;
3821                 case 0x037: /* VIS I fmul8ulx16 */
3822                     CHECK_FPU_FEATURE(dc, VIS1);
3823                     gen_op_load_fpr_DT0(DFPREG(rs1));
3824                     gen_op_load_fpr_DT1(DFPREG(rs2));
3825                     gen_helper_fmul8ulx16();
3826                     gen_op_store_DT0_fpr(DFPREG(rd));
3827                     break;
3828                 case 0x038: /* VIS I fmuld8sux16 */
3829                     CHECK_FPU_FEATURE(dc, VIS1);
3830                     gen_op_load_fpr_DT0(DFPREG(rs1));
3831                     gen_op_load_fpr_DT1(DFPREG(rs2));
3832                     gen_helper_fmuld8sux16();
3833                     gen_op_store_DT0_fpr(DFPREG(rd));
3834                     break;
3835                 case 0x039: /* VIS I fmuld8ulx16 */
3836                     CHECK_FPU_FEATURE(dc, VIS1);
3837                     gen_op_load_fpr_DT0(DFPREG(rs1));
3838                     gen_op_load_fpr_DT1(DFPREG(rs2));
3839                     gen_helper_fmuld8ulx16();
3840                     gen_op_store_DT0_fpr(DFPREG(rd));
3841                     break;
3842                 case 0x03a: /* VIS I fpack32 */
3843                 case 0x03b: /* VIS I fpack16 */
3844                 case 0x03d: /* VIS I fpackfix */
3845                 case 0x03e: /* VIS I pdist */
3846                     // XXX
3847                     goto illegal_insn;
3848                 case 0x048: /* VIS I faligndata */
3849                     CHECK_FPU_FEATURE(dc, VIS1);
3850                     gen_op_load_fpr_DT0(DFPREG(rs1));
3851                     gen_op_load_fpr_DT1(DFPREG(rs2));
3852                     gen_helper_faligndata();
3853                     gen_op_store_DT0_fpr(DFPREG(rd));
3854                     break;
3855                 case 0x04b: /* VIS I fpmerge */
3856                     CHECK_FPU_FEATURE(dc, VIS1);
3857                     gen_op_load_fpr_DT0(DFPREG(rs1));
3858                     gen_op_load_fpr_DT1(DFPREG(rs2));
3859                     gen_helper_fpmerge();
3860                     gen_op_store_DT0_fpr(DFPREG(rd));
3861                     break;
3862                 case 0x04c: /* VIS II bshuffle */
3863                     // XXX
3864                     goto illegal_insn;
3865                 case 0x04d: /* VIS I fexpand */
3866                     CHECK_FPU_FEATURE(dc, VIS1);
3867                     gen_op_load_fpr_DT0(DFPREG(rs1));
3868                     gen_op_load_fpr_DT1(DFPREG(rs2));
3869                     gen_helper_fexpand();
3870                     gen_op_store_DT0_fpr(DFPREG(rd));
3871                     break;
3872                 case 0x050: /* VIS I fpadd16 */
3873                     CHECK_FPU_FEATURE(dc, VIS1);
3874                     gen_op_load_fpr_DT0(DFPREG(rs1));
3875                     gen_op_load_fpr_DT1(DFPREG(rs2));
3876                     gen_helper_fpadd16();
3877                     gen_op_store_DT0_fpr(DFPREG(rd));
3878                     break;
3879                 case 0x051: /* VIS I fpadd16s */
3880                     CHECK_FPU_FEATURE(dc, VIS1);
3881                     gen_helper_fpadd16s(cpu_fpr[rd],
3882                                         cpu_fpr[rs1], cpu_fpr[rs2]);
3883                     break;
3884                 case 0x052: /* VIS I fpadd32 */
3885                     CHECK_FPU_FEATURE(dc, VIS1);
3886                     gen_op_load_fpr_DT0(DFPREG(rs1));
3887                     gen_op_load_fpr_DT1(DFPREG(rs2));
3888                     gen_helper_fpadd32();
3889                     gen_op_store_DT0_fpr(DFPREG(rd));
3890                     break;
3891                 case 0x053: /* VIS I fpadd32s */
3892                     CHECK_FPU_FEATURE(dc, VIS1);
3893                     gen_helper_fpadd32s(cpu_fpr[rd],
3894                                         cpu_fpr[rs1], cpu_fpr[rs2]);
3895                     break;
3896                 case 0x054: /* VIS I fpsub16 */
3897                     CHECK_FPU_FEATURE(dc, VIS1);
3898                     gen_op_load_fpr_DT0(DFPREG(rs1));
3899                     gen_op_load_fpr_DT1(DFPREG(rs2));
3900                     gen_helper_fpsub16();
3901                     gen_op_store_DT0_fpr(DFPREG(rd));
3902                     break;
3903                 case 0x055: /* VIS I fpsub16s */
3904                     CHECK_FPU_FEATURE(dc, VIS1);
3905                     gen_helper_fpsub16s(cpu_fpr[rd],
3906                                         cpu_fpr[rs1], cpu_fpr[rs2]);
3907                     break;
3908                 case 0x056: /* VIS I fpsub32 */
3909                     CHECK_FPU_FEATURE(dc, VIS1);
3910                     gen_op_load_fpr_DT0(DFPREG(rs1));
3911                     gen_op_load_fpr_DT1(DFPREG(rs2));
3912                     gen_helper_fpsub32();
3913                     gen_op_store_DT0_fpr(DFPREG(rd));
3914                     break;
3915                 case 0x057: /* VIS I fpsub32s */
3916                     CHECK_FPU_FEATURE(dc, VIS1);
3917                     gen_helper_fpsub32s(cpu_fpr[rd],
3918                                         cpu_fpr[rs1], cpu_fpr[rs2]);
3919                     break;
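                     /* The VIS logical/move ops below act directly on 32-bit
                        FPR halves: double-word forms touch an even/odd pair
                        (DFPREG), the "s" forms a single register. */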
3920                 case 0x060: /* VIS I fzero */
3921                     CHECK_FPU_FEATURE(dc, VIS1);
3922                     tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3923                     tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3924                     break;
3925                 case 0x061: /* VIS I fzeros */
3926                     CHECK_FPU_FEATURE(dc, VIS1);
3927                     tcg_gen_movi_i32(cpu_fpr[rd], 0);
3928                     break;
3929                 case 0x062: /* VIS I fnor */
3930                     tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3931                                     cpu_fpr[DFPREG(rs2)]);
3932                     tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs1) + 1],
3933                                     cpu_fpr[DFPREG(rs2) + 1]);
3934                                     cpu_fpr[DFPREG(rs2) + 1]);
3935                     break;
3936                 case 0x063: /* VIS I fnors */
3937                     CHECK_FPU_FEATURE(dc, VIS1);
3938                     tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3939                     break;
3940                 case 0x064: /* VIS I fandnot2 */
3941                     CHECK_FPU_FEATURE(dc, VIS1);
3942                     tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3943                                      cpu_fpr[DFPREG(rs2)]);
3944                     tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3945                                      cpu_fpr[DFPREG(rs1) + 1],
3946                                      cpu_fpr[DFPREG(rs2) + 1]);
3947                     break;
3948                 case 0x065: /* VIS I fandnot2s */
3949                     CHECK_FPU_FEATURE(dc, VIS1);
3950                     tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3951                     break;
3952                 case 0x066: /* VIS I fnot2 */
3953                     CHECK_FPU_FEATURE(dc, VIS1);
3954                     tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3955                     tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3956                                     cpu_fpr[DFPREG(rs2) + 1]);
3957                     break;
3958                 case 0x067: /* VIS I fnot2s */
3959                     CHECK_FPU_FEATURE(dc, VIS1);
3960                     tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3961                     break;
3962                 case 0x068: /* VIS I fandnot1 */
3963                     CHECK_FPU_FEATURE(dc, VIS1);
3964                     tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3965                                      cpu_fpr[DFPREG(rs1)]);
3966                     tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3967                                      cpu_fpr[DFPREG(rs2) + 1],
3968                                      cpu_fpr[DFPREG(rs1) + 1]);
3969                     break;
3970                 case 0x069: /* VIS I fandnot1s */
3971                     CHECK_FPU_FEATURE(dc, VIS1);
3972                     tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3973                     break;
3974                 case 0x06a: /* VIS I fnot1 */
3975                     CHECK_FPU_FEATURE(dc, VIS1);
3976                     tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3977                     tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3978                                     cpu_fpr[DFPREG(rs1) + 1]);
3979                     break;
3980                 case 0x06b: /* VIS I fnot1s */
3981                     CHECK_FPU_FEATURE(dc, VIS1);
3982                     tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3983                     break;
3984                 case 0x06c: /* VIS I fxor */
3985                     CHECK_FPU_FEATURE(dc, VIS1);
3986                     tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3987                                     cpu_fpr[DFPREG(rs2)]);
3988                     tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3989                                     cpu_fpr[DFPREG(rs1) + 1],
3990                                     cpu_fpr[DFPREG(rs2) + 1]);
3991                     break;
3992                 case 0x06d: /* VIS I fxors */
3993                     CHECK_FPU_FEATURE(dc, VIS1);
3994                     tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3995                     break;
3996                 case 0x06e: /* VIS I fnand */
3997                     CHECK_FPU_FEATURE(dc, VIS1);
3998                     tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3999                                      cpu_fpr[DFPREG(rs2)]);
4000                     tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs1) + 1],
4001                                      cpu_fpr[DFPREG(rs2) + 1]);
4002                     break;
4003                 case 0x06f: /* VIS I fnands */
4004                     CHECK_FPU_FEATURE(dc, VIS1);
4005                     tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4006                     break;
4007                 case 0x070: /* VIS I fand */
4008                     CHECK_FPU_FEATURE(dc, VIS1);
4009                     tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4010                                     cpu_fpr[DFPREG(rs2)]);
4011                     tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4012                                     cpu_fpr[DFPREG(rs1) + 1],
4013                                     cpu_fpr[DFPREG(rs2) + 1]);
4014                     break;
4015                 case 0x071: /* VIS I fands */
4016                     CHECK_FPU_FEATURE(dc, VIS1);
4017                     tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4018                     break;
4019                 case 0x072: /* VIS I fxnor */
4020                     CHECK_FPU_FEATURE(dc, VIS1);
4021                     tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4022                     tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4023                                     cpu_fpr[DFPREG(rs1)]);
4024                     tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4025                     tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4026                                     cpu_fpr[DFPREG(rs1) + 1]);
4027                     break;
4028                 case 0x073: /* VIS I fxnors */
4029                     CHECK_FPU_FEATURE(dc, VIS1);
4030                     tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4031                     tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4032                     break;
4033                 case 0x074: /* VIS I fsrc1 */
4034                     CHECK_FPU_FEATURE(dc, VIS1);
4035                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4036                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4037                                     cpu_fpr[DFPREG(rs1) + 1]);
4038                     break;
4039                 case 0x075: /* VIS I fsrc1s */
4040                     CHECK_FPU_FEATURE(dc, VIS1);
4041                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4042                     break;
4043                 case 0x076: /* VIS I fornot2 */
4044                     CHECK_FPU_FEATURE(dc, VIS1);
4045                     tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4046                                     cpu_fpr[DFPREG(rs2)]);
4047                     tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4048                                     cpu_fpr[DFPREG(rs1) + 1],
4049                                     cpu_fpr[DFPREG(rs2) + 1]);
4050                     break;
4051                 case 0x077: /* VIS I fornot2s */
4052                     CHECK_FPU_FEATURE(dc, VIS1);
4053                     tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4054                     break;
4055                 case 0x078: /* VIS I fsrc2 */
4056                     CHECK_FPU_FEATURE(dc, VIS1);
4057                     gen_op_load_fpr_DT0(DFPREG(rs2));
4058                     gen_op_store_DT0_fpr(DFPREG(rd));
4059                     break;
4060                 case 0x079: /* VIS I fsrc2s */
4061                     CHECK_FPU_FEATURE(dc, VIS1);
4062                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4063                     break;
4064                 case 0x07a: /* VIS I fornot1 */
4065                     CHECK_FPU_FEATURE(dc, VIS1);
4066                     tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4067                                     cpu_fpr[DFPREG(rs1)]);
4068                     tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4069                                     cpu_fpr[DFPREG(rs2) + 1],
4070                                     cpu_fpr[DFPREG(rs1) + 1]);
4071                     break;
4072                 case 0x07b: /* VIS I fornot1s */
4073                     CHECK_FPU_FEATURE(dc, VIS1);
4074                     tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4075                     break;
4076                 case 0x07c: /* VIS I for */
4077                     CHECK_FPU_FEATURE(dc, VIS1);
4078                     tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4079                                    cpu_fpr[DFPREG(rs2)]);
4080                     tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4081                                    cpu_fpr[DFPREG(rs1) + 1],
4082                                    cpu_fpr[DFPREG(rs2) + 1]);
4083                     break;
4084                 case 0x07d: /* VIS I fors */
4085                     CHECK_FPU_FEATURE(dc, VIS1);
4086                     tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4087                     break;
4088                 case 0x07e: /* VIS I fone */
4089                     CHECK_FPU_FEATURE(dc, VIS1);
4090                     tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4091                     tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4092                     break;
4093                 case 0x07f: /* VIS I fones */
4094                     CHECK_FPU_FEATURE(dc, VIS1);
4095                     tcg_gen_movi_i32(cpu_fpr[rd], -1);
4096                     break;
4097                 case 0x080: /* VIS I shutdown */
4098                 case 0x081: /* VIS II siam */
4099                     // XXX
4100                     goto illegal_insn;
4101                 default:
4102                     goto illegal_insn;
4103                 }
4104 #else
4105                 goto ncp_insn;
4106 #endif
4107             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4108 #ifdef TARGET_SPARC64
4109                 goto illegal_insn;
4110 #else
4111                 goto ncp_insn;
4112 #endif
4113 #ifdef TARGET_SPARC64
4114             } else if (xop == 0x39) { /* V9 return */
4115                 TCGv_i32 r_const;
4116
4117                 save_state(dc, cpu_cond);
4118                 cpu_src1 = get_src1(insn, cpu_src1);
4119                 if (IS_IMM) {   /* immediate */
4120                     simm = GET_FIELDs(insn, 19, 31);
4121                     tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4122                 } else {                /* register */
4123                     rs2 = GET_FIELD(insn, 27, 31);
4124                     if (rs2) {
4125                         gen_movl_reg_TN(rs2, cpu_src2);
4126                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4127                     } else
4128                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
4129                 }
4130                 gen_helper_restore();
4131                 gen_mov_pc_npc(dc, cpu_cond);
4132                 r_const = tcg_const_i32(3);
4133                 gen_helper_check_align(cpu_dst, r_const);
4134                 tcg_temp_free_i32(r_const);
4135                 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4136                 dc->npc = DYNAMIC_PC;
4137                 goto jmp_insn;
4138 #endif
4139             } else {
4140                 cpu_src1 = get_src1(insn, cpu_src1);
4141                 if (IS_IMM) {   /* immediate */
4142                     simm = GET_FIELDs(insn, 19, 31);
4143                     tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4144                 } else {                /* register */
4145                     rs2 = GET_FIELD(insn, 27, 31);
4146                     if (rs2) {
4147                         gen_movl_reg_TN(rs2, cpu_src2);
4148                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4149                     } else
4150                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
4151                 }
4152                 switch (xop) {
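                 /* jmpl: write the address of this instruction to rd, check the
                    target for 4-byte alignment and install it as the new npc
                    (delayed control transfer). */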
4153                 case 0x38:      /* jmpl */
4154                     {
4155                         TCGv r_pc;
4156                         TCGv_i32 r_const;
4157
4158                         r_pc = tcg_const_tl(dc->pc);
4159                         gen_movl_TN_reg(rd, r_pc);
4160                         tcg_temp_free(r_pc);
4161                         gen_mov_pc_npc(dc, cpu_cond);
4162                         r_const = tcg_const_i32(3);
4163                         gen_helper_check_align(cpu_dst, r_const);
4164                         tcg_temp_free_i32(r_const);
4165                         tcg_gen_mov_tl(cpu_npc, cpu_dst);
4166                         dc->npc = DYNAMIC_PC;
4167                     }
4168                     goto jmp_insn;
4169 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4170                 case 0x39:      /* rett, V9 return */
4171                     {
4172                         TCGv_i32 r_const;
4173
4174                         if (!supervisor(dc))
4175                             goto priv_insn;
4176                         gen_mov_pc_npc(dc, cpu_cond);
4177                         r_const = tcg_const_i32(3);
4178                         gen_helper_check_align(cpu_dst, r_const);
4179                         tcg_temp_free_i32(r_const);
4180                         tcg_gen_mov_tl(cpu_npc, cpu_dst);
4181                         dc->npc = DYNAMIC_PC;
4182                         gen_helper_rett();
4183                     }
4184                     goto jmp_insn;
4185 #endif
4186                 case 0x3b: /* flush */
4187                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4188                         goto unimp_flush;
4189                     gen_helper_flush(cpu_dst);
4190                     break;
4191                 case 0x3c:      /* save */
4192                     save_state(dc, cpu_cond);
4193                     gen_helper_save();
4194                     gen_movl_TN_reg(rd, cpu_dst);
4195                     break;
4196                 case 0x3d:      /* restore */
4197                     save_state(dc, cpu_cond);
4198                     gen_helper_restore();
4199                     gen_movl_TN_reg(rd, cpu_dst);
4200                     break;
4201 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4202                 case 0x3e:      /* V9 done/retry */
4203                     {
4204                         switch (rd) {
4205                         case 0:
4206                             if (!supervisor(dc))
4207                                 goto priv_insn;
4208                             dc->npc = DYNAMIC_PC;
4209                             dc->pc = DYNAMIC_PC;
4210                             gen_helper_done();
4211                             goto jmp_insn;
4212                         case 1:
4213                             if (!supervisor(dc))
4214                                 goto priv_insn;
4215                             dc->npc = DYNAMIC_PC;
4216                             dc->pc = DYNAMIC_PC;
4217                             gen_helper_retry();
4218                             goto jmp_insn;
4219                         default:
4220                             goto illegal_insn;
4221                         }
4222                     }
4223                     break;
4224 #endif
4225                 default:
4226                     goto illegal_insn;
4227                 }
4228             }
4229             break;
4230         }
4231         break;
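         /* Format 3: loads and stores.  The effective address is rs1 + simm13
            in the immediate form, otherwise rs1 + rs2 (casa/casxa take the
            address from rs1 alone and fetch rs2 separately); xop, insn bits
            24:19, selects the operation below. */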
4232     case 3:                     /* load/store instructions */
4233         {
4234             unsigned int xop = GET_FIELD(insn, 7, 12);
4235
4236             cpu_src1 = get_src1(insn, cpu_src1);
4237             if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4238                 rs2 = GET_FIELD(insn, 27, 31);
4239                 gen_movl_reg_TN(rs2, cpu_src2);
4240                 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4241             } else if (IS_IMM) {     /* immediate */
4242                 simm = GET_FIELDs(insn, 19, 31);
4243                 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4244             } else {            /* register */
4245                 rs2 = GET_FIELD(insn, 27, 31);
4246                 if (rs2 != 0) {
4247                     gen_movl_reg_TN(rs2, cpu_src2);
4248                     tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4249                 } else
4250                     tcg_gen_mov_tl(cpu_addr, cpu_src1);
4251             }
4252             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4253                 (xop > 0x17 && xop <= 0x1d ) ||
4254                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4255                 switch (xop) {
4256                 case 0x0:       /* ld, V9 lduw, load unsigned word */
4257                     gen_address_mask(dc, cpu_addr);
4258                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4259                     break;
4260                 case 0x1:       /* ldub, load unsigned byte */
4261                     gen_address_mask(dc, cpu_addr);
4262                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4263                     break;
4264                 case 0x2:       /* lduh, load unsigned halfword */
4265                     gen_address_mask(dc, cpu_addr);
4266                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4267                     break;
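                     /* ldd: rd must be even; the 64-bit value is loaded as a
                        whole, the low word goes to r[rd + 1] and the high word
                        to r[rd] (via cpu_val at the end of the switch). */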
4268                 case 0x3:       /* ldd, load double word */
4269                     if (rd & 1)
4270                         goto illegal_insn;
4271                     else {
4272                         TCGv_i32 r_const;
4273
4274                         save_state(dc, cpu_cond);
4275                         r_const = tcg_const_i32(7);
4276                         gen_helper_check_align(cpu_addr, r_const); // XXX remove
4277                         tcg_temp_free_i32(r_const);
4278                         gen_address_mask(dc, cpu_addr);
4279                         tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4280                         tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4281                         tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4282                         gen_movl_TN_reg(rd + 1, cpu_tmp0);
4283                         tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4284                         tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4285                         tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4286                     }
4287                     break;
4288                 case 0x9:       /* ldsb, load signed byte */
4289                     gen_address_mask(dc, cpu_addr);
4290                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4291                     break;
4292                 case 0xa:       /* ldsh, load signed halfword */
4293                     gen_address_mask(dc, cpu_addr);
4294                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4295                     break;
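                     /* ldstub is emulated as a plain byte load followed by a
                        store of 0xff to the same address (not atomic here). */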
4296                 case 0xd:       /* ldstub -- XXX: should be atomic */
4297                     {
4298                         TCGv r_const;
4299
4300                         gen_address_mask(dc, cpu_addr);
4301                         tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4302                         r_const = tcg_const_tl(0xff);
4303                         tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4304                         tcg_temp_free(r_const);
4305                     }
4306                     break;
4307                 case 0x0f:      /* swap, swap register with memory. Also
4308                                    atomically */
4309                     CHECK_IU_FEATURE(dc, SWAP);
4310                     gen_movl_reg_TN(rd, cpu_val);
4311                     gen_address_mask(dc, cpu_addr);
4312                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4313                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4314                     tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4315                     break;
4316 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4317                 case 0x10:      /* lda, V9 lduwa, load word alternate */
4318 #ifndef TARGET_SPARC64
4319                     if (IS_IMM)
4320                         goto illegal_insn;
4321                     if (!supervisor(dc))
4322                         goto priv_insn;
4323 #endif
4324                     save_state(dc, cpu_cond);
4325                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4326                     break;
4327                 case 0x11:      /* lduba, load unsigned byte alternate */
4328 #ifndef TARGET_SPARC64
4329                     if (IS_IMM)
4330                         goto illegal_insn;
4331                     if (!supervisor(dc))
4332                         goto priv_insn;
4333 #endif
4334                     save_state(dc, cpu_cond);
4335                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4336                     break;
4337                 case 0x12:      /* lduha, load unsigned halfword alternate */
4338 #ifndef TARGET_SPARC64
4339                     if (IS_IMM)
4340                         goto illegal_insn;
4341                     if (!supervisor(dc))
4342                         goto priv_insn;
4343 #endif
4344                     save_state(dc, cpu_cond);
4345                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4346                     break;
4347                 case 0x13:      /* ldda, load double word alternate */
4348 #ifndef TARGET_SPARC64
4349                     if (IS_IMM)
4350                         goto illegal_insn;
4351                     if (!supervisor(dc))
4352                         goto priv_insn;
4353 #endif
4354                     if (rd & 1)
4355                         goto illegal_insn;
4356                     save_state(dc, cpu_cond);
4357                     gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4358                     goto skip_move;
4359                 case 0x19:      /* ldsba, load signed byte alternate */
4360 #ifndef TARGET_SPARC64
4361                     if (IS_IMM)
4362                         goto illegal_insn;
4363                     if (!supervisor(dc))
4364                         goto priv_insn;
4365 #endif
4366                     save_state(dc, cpu_cond);
4367                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4368                     break;
4369                 case 0x1a:      /* ldsha, load signed halfword alternate */
4370 #ifndef TARGET_SPARC64
4371                     if (IS_IMM)
4372                         goto illegal_insn;
4373                     if (!supervisor(dc))
4374                         goto priv_insn;
4375 #endif
4376                     save_state(dc, cpu_cond);
4377                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4378                     break;
4379                 case 0x1d:      /* ldstuba -- XXX: should be atomic */
4380 #ifndef TARGET_SPARC64
4381                     if (IS_IMM)
4382                         goto illegal_insn;
4383                     if (!supervisor(dc))
4384                         goto priv_insn;
4385 #endif
4386                     save_state(dc, cpu_cond);
4387                     gen_ldstub_asi(cpu_val, cpu_addr, insn);
4388                     break;
4389                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
4390                                    atomically */
4391                     CHECK_IU_FEATURE(dc, SWAP);
4392 #ifndef TARGET_SPARC64
4393                     if (IS_IMM)
4394                         goto illegal_insn;
4395                     if (!supervisor(dc))
4396                         goto priv_insn;
4397 #endif
4398                     save_state(dc, cpu_cond);
4399                     gen_movl_reg_TN(rd, cpu_val);
4400                     gen_swap_asi(cpu_val, cpu_addr, insn);
4401                     break;
4402
4403 #ifndef TARGET_SPARC64
4404                 case 0x30: /* ldc */
4405                 case 0x31: /* ldcsr */
4406                 case 0x33: /* lddc */
4407                     goto ncp_insn;
4408 #endif
4409 #endif
4410 #ifdef TARGET_SPARC64
4411                 case 0x08: /* V9 ldsw */
4412                     gen_address_mask(dc, cpu_addr);
4413                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4414                     break;
4415                 case 0x0b: /* V9 ldx */
4416                     gen_address_mask(dc, cpu_addr);
4417                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4418                     break;
4419                 case 0x18: /* V9 ldswa */
4420                     save_state(dc, cpu_cond);
4421                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4422                     break;
4423                 case 0x1b: /* V9 ldxa */
4424                     save_state(dc, cpu_cond);
4425                     gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4426                     break;
4427                 case 0x2d: /* V9 prefetch, no effect */
4428                     goto skip_move;
4429                 case 0x30: /* V9 ldfa */
4430                     save_state(dc, cpu_cond);
4431                     gen_ldf_asi(cpu_addr, insn, 4, rd);
4432                     goto skip_move;
4433                 case 0x33: /* V9 lddfa */
4434                     save_state(dc, cpu_cond);
4435                     gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4436                     goto skip_move;
4437                 case 0x3d: /* V9 prefetcha, no effect */
4438                     goto skip_move;
4439                 case 0x32: /* V9 ldqfa */
4440                     CHECK_FPU_FEATURE(dc, FLOAT128);
4441                     save_state(dc, cpu_cond);
4442                     gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4443                     goto skip_move;
4444 #endif
4445                 default:
4446                     goto illegal_insn;
4447                 }
4448                 gen_movl_TN_reg(rd, cpu_val);
4449 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4450             skip_move: ;
4451 #endif
4452             } else if (xop >= 0x20 && xop < 0x24) {
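                     /* Floating-point loads (ldf/ldfsr/ldqf/lddf): trap if the
                        FPU is disabled, then save state before the access. */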
4453                 if (gen_trap_ifnofpu(dc, cpu_cond))
4454                     goto jmp_insn;
4455                 save_state(dc, cpu_cond);
4456                 switch (xop) {
4457                 case 0x20:      /* ldf, load fpreg */
4458                     gen_address_mask(dc, cpu_addr);
4459                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4460                     tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4461                     break;
4462                 case 0x21:      /* ldfsr, V9 ldxfsr */
4463 #ifdef TARGET_SPARC64
4464                     gen_address_mask(dc, cpu_addr);
4465                     if (rd == 1) {
4466                         tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4467                         gen_helper_ldxfsr(cpu_tmp64);
4468                     } else
4469 #endif
4470                     {
4471                         /* 32-bit FSR load (ldfsr); V9 ldxfsr is handled above */
4472                         tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4473                         gen_helper_ldfsr(cpu_tmp32);
4474                     }
4475                     break;
4476                 case 0x22:      /* ldqf, load quad fpreg */
4477                     {
4478                         TCGv_i32 r_const;
4479
4480                         CHECK_FPU_FEATURE(dc, FLOAT128);
4481                         r_const = tcg_const_i32(dc->mem_idx);
4482                         gen_helper_ldqf(cpu_addr, r_const);
4483                         tcg_temp_free_i32(r_const);
4484                         gen_op_store_QT0_fpr(QFPREG(rd));
4485                     }
4486                     break;
4487                 case 0x23:      /* lddf, load double fpreg */
4488                     {
4489                         TCGv_i32 r_const;
4490
4491                         r_const = tcg_const_i32(dc->mem_idx);
4492                         gen_helper_lddf(cpu_addr, r_const);
4493                         tcg_temp_free_i32(r_const);
4494                         gen_op_store_DT0_fpr(DFPREG(rd));
4495                     }
4496                     break;
4497                 default:
4498                     goto illegal_insn;
4499                 }
4500             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4501                        xop == 0xe || xop == 0x1e) {
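                 /* Integer stores: the register to be stored is fetched into
                    cpu_val first; st/stb/sth/std plus the alternate-space
                    forms and V9 stx/stxa. */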
4502                 gen_movl_reg_TN(rd, cpu_val);
4503                 switch (xop) {
4504                 case 0x4: /* st, store word */
4505                     gen_address_mask(dc, cpu_addr);
4506                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4507                     break;
4508                 case 0x5: /* stb, store byte */
4509                     gen_address_mask(dc, cpu_addr);
4510                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4511                     break;
4512                 case 0x6: /* sth, store halfword */
4513                     gen_address_mask(dc, cpu_addr);
4514                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4515                     break;
4516                 case 0x7: /* std, store double word */
4517                     if (rd & 1)
4518                         goto illegal_insn;
4519                     else {
4520                         TCGv_i32 r_const;
4521
4522                         save_state(dc, cpu_cond);
4523                         gen_address_mask(dc, cpu_addr);
4524                         r_const = tcg_const_i32(7);
4525                         gen_helper_check_align(cpu_addr, r_const); // XXX remove
4526                         tcg_temp_free_i32(r_const);
4527                         gen_movl_reg_TN(rd + 1, cpu_tmp0);
4528                         tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4529                         tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4530                     }
4531                     break;
4532 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4533                 case 0x14: /* sta, V9 stwa, store word alternate */
4534 #ifndef TARGET_SPARC64
4535                     if (IS_IMM)
4536                         goto illegal_insn;
4537                     if (!supervisor(dc))
4538                         goto priv_insn;
4539 #endif
4540                     save_state(dc, cpu_cond);
4541                     gen_st_asi(cpu_val, cpu_addr, insn, 4);
4542                     break;
4543                 case 0x15: /* stba, store byte alternate */
4544 #ifndef TARGET_SPARC64
4545                     if (IS_IMM)
4546                         goto illegal_insn;
4547                     if (!supervisor(dc))
4548                         goto priv_insn;
4549 #endif
4550                     save_state(dc, cpu_cond);
4551                     gen_st_asi(cpu_val, cpu_addr, insn, 1);
4552                     break;
4553                 case 0x16: /* stha, store halfword alternate */
4554 #ifndef TARGET_SPARC64
4555                     if (IS_IMM)
4556                         goto illegal_insn;
4557                     if (!supervisor(dc))
4558                         goto priv_insn;
4559 #endif
4560                     save_state(dc, cpu_cond);
4561                     gen_st_asi(cpu_val, cpu_addr, insn, 2);
4562                     break;
4563                 case 0x17: /* stda, store double word alternate */
4564 #ifndef TARGET_SPARC64
4565                     if (IS_IMM)
4566                         goto illegal_insn;
4567                     if (!supervisor(dc))
4568                         goto priv_insn;
4569 #endif
4570                     if (rd & 1)
4571                         goto illegal_insn;
4572                     else {
4573                         save_state(dc, cpu_cond);
4574                         gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4575                     }
4576                     break;
4577 #endif
4578 #ifdef TARGET_SPARC64
4579                 case 0x0e: /* V9 stx */
4580                     gen_address_mask(dc, cpu_addr);
4581                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4582                     break;
4583                 case 0x1e: /* V9 stxa */
4584                     save_state(dc, cpu_cond);
4585                     gen_st_asi(cpu_val, cpu_addr, insn, 8);
4586                     break;
4587 #endif
4588                 default:
4589                     goto illegal_insn;
4590                 }
4591             } else if (xop > 0x23 && xop < 0x28) {
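                 /* Floating-point stores (stf/stfsr/stqf/stdf): trap if the
                    FPU is disabled, then save state before the access. */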
4592                 if (gen_trap_ifnofpu(dc, cpu_cond))
4593                     goto jmp_insn;
4594                 save_state(dc, cpu_cond);
4595                 switch (xop) {
4596                 case 0x24: /* stf, store fpreg */
4597                     gen_address_mask(dc, cpu_addr);
4598                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4599                     tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4600                     break;
4601                 case 0x25: /* stfsr, V9 stxfsr */
4602 #ifdef TARGET_SPARC64
4603                     gen_address_mask(dc, cpu_addr);
4604                     tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4605                     if (rd == 1)
4606                         tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4607                     else
4608                         tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4609 #else
4610                     tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4611                     tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4612 #endif
4613                     break;
4614                 case 0x26:
4615 #ifdef TARGET_SPARC64
4616                     /* V9 stqf, store quad fpreg */
4617                     {
4618                         TCGv_i32 r_const;
4619
4620                         CHECK_FPU_FEATURE(dc, FLOAT128);
4621                         gen_op_load_fpr_QT0(QFPREG(rd));
4622                         r_const = tcg_const_i32(dc->mem_idx);
4623                         gen_helper_stqf(cpu_addr, r_const);
4624                         tcg_temp_free_i32(r_const);
4625                     }
4626                     break;
4627 #else /* !TARGET_SPARC64 */
4628                     /* stdfq, store floating point queue */
4629 #if defined(CONFIG_USER_ONLY)
4630                     goto illegal_insn;
4631 #else
4632                     if (!supervisor(dc))
4633                         goto priv_insn;
4634                     if (gen_trap_ifnofpu(dc, cpu_cond))
4635                         goto jmp_insn;
4636                     goto nfq_insn;
4637 #endif
4638 #endif
4639                 case 0x27: /* stdf, store double fpreg */
4640                     {
4641                         TCGv_i32 r_const;
4642
4643                         gen_op_load_fpr_DT0(DFPREG(rd));
4644                         r_const = tcg_const_i32(dc->mem_idx);
4645                         gen_helper_stdf(cpu_addr, r_const);
4646                         tcg_temp_free_i32(r_const);
4647                     }
4648                     break;
4649                 default:
4650                     goto illegal_insn;
4651                 }
4652             } else if (xop > 0x33 && xop < 0x3f) {
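                 /* Alternate-space FP stores and compare-and-swap (casa/casxa)
                    on SPARC64; the coprocessor stores raise an exception on
                    SPARC32. */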
4653                 save_state(dc, cpu_cond);
4654                 switch (xop) {
4655 #ifdef TARGET_SPARC64
4656                 case 0x34: /* V9 stfa */
4657                     gen_stf_asi(cpu_addr, insn, 4, rd);
4658                     break;
4659                 case 0x36: /* V9 stqfa */
4660                     {
4661                         TCGv_i32 r_const;
4662
4663                         CHECK_FPU_FEATURE(dc, FLOAT128);
4664                         r_const = tcg_const_i32(7);
4665                         gen_helper_check_align(cpu_addr, r_const);
4666                         tcg_temp_free_i32(r_const);
4667                         gen_op_load_fpr_QT0(QFPREG(rd));
4668                         gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4669                     }
4670                     break;
4671                 case 0x37: /* V9 stdfa */
4672                     gen_op_load_fpr_DT0(DFPREG(rd));
4673                     gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4674                     break;
4675                 case 0x3c: /* V9 casa */
4676                     gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4677                     gen_movl_TN_reg(rd, cpu_val);
4678                     break;
4679                 case 0x3e: /* V9 casxa */
4680                     gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4681                     gen_movl_TN_reg(rd, cpu_val);
4682                     break;
4683 #else
4684                 case 0x34: /* stc */
4685                 case 0x35: /* stcsr */
4686                 case 0x36: /* stdcq */
4687                 case 0x37: /* stdc */
4688                     goto ncp_insn;
4689 #endif
4690                 default:
4691                     goto illegal_insn;
4692                 }
4693             } else
4694                 goto illegal_insn;
4695         }
4696         break;
4697     }
4698     /* default case for non jump instructions */
4699     if (dc->npc == DYNAMIC_PC) {
4700         dc->pc = DYNAMIC_PC;
4701         gen_op_next_insn();
4702     } else if (dc->npc == JUMP_PC) {
4703         /* we can do a static jump */
4704         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4705         dc->is_br = 1;
4706     } else {
4707         dc->pc = dc->npc;
4708         dc->npc = dc->npc + 4;
4709     }
4710  jmp_insn:
4711     return;
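     /* Exception exits: save the architectural state, raise the corresponding
        trap and mark the TB as finished so translation stops here. */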
4712  illegal_insn:
4713     {
4714         TCGv_i32 r_const;
4715
4716         save_state(dc, cpu_cond);
4717         r_const = tcg_const_i32(TT_ILL_INSN);
4718         gen_helper_raise_exception(r_const);
4719         tcg_temp_free_i32(r_const);
4720         dc->is_br = 1;
4721     }
4722     return;
4723  unimp_flush:
4724     {
4725         TCGv_i32 r_const;
4726
4727         save_state(dc, cpu_cond);
4728         r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4729         gen_helper_raise_exception(r_const);
4730         tcg_temp_free_i32(r_const);
4731         dc->is_br = 1;
4732     }
4733     return;
4734 #if !defined(CONFIG_USER_ONLY)
4735  priv_insn:
4736     {
4737         TCGv_i32 r_const;
4738
4739         save_state(dc, cpu_cond);
4740         r_const = tcg_const_i32(TT_PRIV_INSN);
4741         gen_helper_raise_exception(r_const);
4742         tcg_temp_free_i32(r_const);
4743         dc->is_br = 1;
4744     }
4745     return;
4746 #endif
4747  nfpu_insn:
4748     save_state(dc, cpu_cond);
4749     gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4750     dc->is_br = 1;
4751     return;
4752 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4753  nfq_insn:
4754     save_state(dc, cpu_cond);
4755     gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4756     dc->is_br = 1;
4757     return;
4758 #endif
4759 #ifndef TARGET_SPARC64
4760  ncp_insn:
4761     {
4762         TCGv_i32 r_const;
4763
4764         save_state(dc, cpu_cond);
4765         r_const = tcg_const_i32(TT_NCP_INSN);
4766         gen_helper_raise_exception(r_const);
4767         tcg_temp_free_i32(r_const);
4768         dc->is_br = 1;
4769     }
4770     return;
4771 #endif
4772 }
4773
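/* Translate a block of instructions starting at tb->pc.  Translation stops at
   branches, page boundaries, non-sequential PCs, when the opcode buffer or the
   per-TB instruction budget is exhausted, or when single-stepping.  When 'spc'
   is set the loop also records pc/npc for every generated op so that a host PC
   can later be mapped back (see gen_pc_load). */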
4774 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4775                                                   int spc, CPUSPARCState *env)
4776 {
4777     target_ulong pc_start, last_pc;
4778     uint16_t *gen_opc_end;
4779     DisasContext dc1, *dc = &dc1;
4780     CPUBreakpoint *bp;
4781     int j, lj = -1;
4782     int num_insns;
4783     int max_insns;
4784
4785     memset(dc, 0, sizeof(DisasContext));
4786     dc->tb = tb;
4787     pc_start = tb->pc;
4788     dc->pc = pc_start;
4789     last_pc = dc->pc;
4790     dc->npc = (target_ulong) tb->cs_base;
4791     dc->cc_op = CC_OP_DYNAMIC;
4792     dc->mem_idx = cpu_mmu_index(env);
4793     dc->def = env->def;
4794     if ((dc->def->features & CPU_FEATURE_FLOAT))
4795         dc->fpu_enabled = cpu_fpu_enabled(env);
4796     else
4797         dc->fpu_enabled = 0;
4798 #ifdef TARGET_SPARC64
4799     dc->address_mask_32bit = env->pstate & PS_AM;
4800 #endif
4801     gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4802
4803     cpu_tmp0 = tcg_temp_new();
4804     cpu_tmp32 = tcg_temp_new_i32();
4805     cpu_tmp64 = tcg_temp_new_i64();
4806
4807     cpu_dst = tcg_temp_local_new();
4808
4809     // loads and stores
4810     cpu_val = tcg_temp_local_new();
4811     cpu_addr = tcg_temp_local_new();
4812
4813     num_insns = 0;
4814     max_insns = tb->cflags & CF_COUNT_MASK;
4815     if (max_insns == 0)
4816         max_insns = CF_COUNT_MASK;
4817     gen_icount_start();
4818     do {
4819         if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
4820             TAILQ_FOREACH(bp, &env->breakpoints, entry) {
4821                 if (bp->pc == dc->pc) {
4822                     if (dc->pc != pc_start)
4823                         save_state(dc, cpu_cond);
4824                     gen_helper_debug();
4825                     tcg_gen_exit_tb(0);
4826                     dc->is_br = 1;
4827                     goto exit_gen_loop;
4828                 }
4829             }
4830         }
4831         if (spc) {
4832             qemu_log("Search PC...\n");
4833             j = gen_opc_ptr - gen_opc_buf;
4834             if (lj < j) {
4835                 lj++;
4836                 while (lj < j)
4837                     gen_opc_instr_start[lj++] = 0;
4838                 gen_opc_pc[lj] = dc->pc;
4839                 gen_opc_npc[lj] = dc->npc;
4840                 gen_opc_instr_start[lj] = 1;
4841                 gen_opc_icount[lj] = num_insns;
4842             }
4843         }
4844         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4845             gen_io_start();
4846         last_pc = dc->pc;
4847         disas_sparc_insn(dc);
4848         num_insns++;
4849
4850         if (dc->is_br)
4851             break;
4852         /* if the next PC is different, we abort now */
4853         if (dc->pc != (last_pc + 4))
4854             break;
4855         /* if we reach a page boundary, we stop generation so that the
4856            PC of a TT_TFAULT exception is always in the right page */
4857         if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4858             break;
4859         /* if single step mode, we generate only one instruction and
4860            generate an exception */
4861         if (env->singlestep_enabled || singlestep) {
4862             tcg_gen_movi_tl(cpu_pc, dc->pc);
4863             tcg_gen_exit_tb(0);
4864             break;
4865         }
4866     } while ((gen_opc_ptr < gen_opc_end) &&
4867              (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4868              num_insns < max_insns);
4869
4870  exit_gen_loop:
4871     tcg_temp_free(cpu_addr);
4872     tcg_temp_free(cpu_val);
4873     tcg_temp_free(cpu_dst);
4874     tcg_temp_free_i64(cpu_tmp64);
4875     tcg_temp_free_i32(cpu_tmp32);
4876     tcg_temp_free(cpu_tmp0);
4877     if (tb->cflags & CF_LAST_IO)
4878         gen_io_end();
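     /* Close the TB: if both pc and npc are known statically we can chain
        directly to the next TB, otherwise store them and exit to the main
        loop. */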
4879     if (!dc->is_br) {
4880         if (dc->pc != DYNAMIC_PC &&
4881             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4882             /* static PC and NPC: we can use direct chaining */
4883             gen_goto_tb(dc, 0, dc->pc, dc->npc);
4884         } else {
4885             if (dc->pc != DYNAMIC_PC)
4886                 tcg_gen_movi_tl(cpu_pc, dc->pc);
4887             save_npc(dc, cpu_cond);
4888             tcg_gen_exit_tb(0);
4889         }
4890     }
4891     gen_icount_end(tb, num_insns);
4892     *gen_opc_ptr = INDEX_op_end;
4893     if (spc) {
4894         j = gen_opc_ptr - gen_opc_buf;
4895         lj++;
4896         while (lj <= j)
4897             gen_opc_instr_start[lj++] = 0;
4898 #if 0
4899         log_page_dump();
4900 #endif
4901         gen_opc_jump_pc[0] = dc->jump_pc[0];
4902         gen_opc_jump_pc[1] = dc->jump_pc[1];
4903     } else {
4904         tb->size = last_pc + 4 - pc_start;
4905         tb->icount = num_insns;
4906     }
4907 #ifdef DEBUG_DISAS
4908     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
4909         qemu_log("--------------\n");
4910         qemu_log("IN: %s\n", lookup_symbol(pc_start));
4911         log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
4912         qemu_log("\n");
4913     }
4914 #endif
4915 }
4916
4917 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4918 {
4919     gen_intermediate_code_internal(tb, 0, env);
4920 }
4921
4922 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4923 {
4924     gen_intermediate_code_internal(tb, 1, env);
4925 }
4926
4927 void gen_intermediate_code_init(CPUSPARCState *env)
4928 {
4929     unsigned int i;
4930     static int inited;
4931     static const char * const gregnames[8] = {
4932         NULL, // g0 not used
4933         "g1",
4934         "g2",
4935         "g3",
4936         "g4",
4937         "g5",
4938         "g6",
4939         "g7",
4940     };
4941     static const char * const fregnames[64] = {
4942         "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4943         "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4944         "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4945         "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4946         "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4947         "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4948         "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4949         "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4950     };
4951
4952     /* init various static tables */
4953     if (!inited) {
4954         inited = 1;
4955
4956         cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
4957         cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
4958                                              offsetof(CPUState, regwptr),
4959                                              "regwptr");
4960 #ifdef TARGET_SPARC64
4961         cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
4962                                          "xcc");
4963         cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
4964                                          "asi");
4965         cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
4966                                           "fprs");
4967         cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
4968                                      "gsr");
4969         cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
4970                                            offsetof(CPUState, tick_cmpr),
4971                                            "tick_cmpr");
4972         cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
4973                                             offsetof(CPUState, stick_cmpr),
4974                                             "stick_cmpr");
4975         cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
4976                                              offsetof(CPUState, hstick_cmpr),
4977                                              "hstick_cmpr");
4978         cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
4979                                        "hintp");
4980         cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
4981                                       "htba");
4982         cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
4983                                       "hver");
4984         cpu_ssr = tcg_global_mem_new(TCG_AREG0,
4985                                      offsetof(CPUState, ssr), "ssr");
4986         cpu_ver = tcg_global_mem_new(TCG_AREG0,
4987                                      offsetof(CPUState, version), "ver");
4988         cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
4989                                              offsetof(CPUState, softint),
4990                                              "softint");
4991 #else
4992         cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
4993                                      "wim");
4994 #endif
4995         cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
4996                                       "cond");
4997         cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
4998                                         "cc_src");
4999         cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5000                                          offsetof(CPUState, cc_src2),
5001                                          "cc_src2");
5002         cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5003                                         "cc_dst");
5004         cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5005                                            "cc_op");
5006         cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5007                                          "psr");
5008         cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5009                                      "fsr");
5010         cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5011                                     "pc");
5012         cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5013                                      "npc");
5014         cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5015 #ifndef CONFIG_USER_ONLY
5016         cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5017                                      "tbr");
5018 #endif
5019         for (i = 1; i < 8; i++)
5020             cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5021                                               offsetof(CPUState, gregs[i]),
5022                                               gregnames[i]);
5023         for (i = 0; i < TARGET_FPREGS; i++)
5024             cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5025                                                 offsetof(CPUState, fpr[i]),
5026                                                 fregnames[i]);
5027
5028         /* register helpers */
5029
5030 #define GEN_HELPER 2
5031 #include "helper.h"
5032     }
5033 }
5034
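/* Restore pc/npc from the values recorded during a searched-pc retranslation.
   An npc of 1 (DYNAMIC_PC) means npc is already in the CPU state; 2 (JUMP_PC)
   means it is one of the two recorded jump targets, selected by the value
   passed in puc. */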
5035 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5036                 unsigned long searched_pc, int pc_pos, void *puc)
5037 {
5038     target_ulong npc;
5039     env->pc = gen_opc_pc[pc_pos];
5040     npc = gen_opc_npc[pc_pos];
5041     if (npc == 1) {
5042         /* dynamic NPC: already stored */
5043     } else if (npc == 2) {
5044         target_ulong t2 = (target_ulong)(unsigned long)puc;
5045         /* jump PC: use T2 and the jump targets of the translation */
5046         if (t2)
5047             env->npc = gen_opc_jump_pc[0];
5048         else
5049             env->npc = gen_opc_jump_pc[1];
5050     } else {
5051         env->npc = npc;
5052     }
5053 }