Use dynamic computation for condition codes
[qemu] / target-sparc / translate.c
1 /*
2    SPARC translation
3
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, write to the Free Software
19    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA  02110-1301 USA
20  */
21
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
27
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
33
34 #define GEN_HELPER 1
35 #include "helper.h"
36
37 #define DEBUG_DISAS
38
39 #define DYNAMIC_PC  1 /* dynamic pc value */
40 #define JUMP_PC     2 /* dynamic pc value restricted to the two values
41                          in jump_pc[0]/jump_pc[1], chosen by the branch condition */
42
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst, cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
68
69 #include "gen-icount.h"
70
71 typedef struct DisasContext {
72     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
73     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
75     int is_br;
76     int mem_idx;
77     int fpu_enabled;
78     int address_mask_32bit;
79     uint32_t cc_op;  /* current CC operation */
80     struct TranslationBlock *tb;
81     sparc_def_t *def;
82 } DisasContext;
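/* cc_op tracks how the condition codes were last produced.  Whenever it is
   anything other than CC_OP_FLAGS, the PSR flag bits are considered stale
   and gen_cond() below calls gen_helper_compute_psr() to recompute them
   from the saved cpu_cc_src/cpu_cc_src2/cpu_cc_dst operands before a
   condition is evaluated. */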
83
84 // This macro uses non-native bit order, i.e. bit 0 is the most significant bit (2^31)
85 #define GET_FIELD(X, FROM, TO)                                  \
86     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
87
88 // This macro uses the bit order from the SPARC manuals, i.e. bit 0 is 2^0 (the least significant bit)
89 #define GET_FIELD_SP(X, FROM, TO)               \
90     GET_FIELD(X, 31 - (TO), 31 - (FROM))
91
92 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
93 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
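/* Worked example: in the branch format the condition field occupies bits
   28..25 of the instruction word.  In GET_FIELD's MSB-first numbering
   (bit 0 == bit 31) that is FROM=3, TO=6, so
       GET_FIELD(insn, 3, 6) == (insn >> 25) & 0xf
   which is exactly the extraction performed in do_branch() below. */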
94
95 #ifdef TARGET_SPARC64
96 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
97 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
98 #else
99 #define DFPREG(r) (r & 0x1e)
100 #define QFPREG(r) (r & 0x1c)
101 #endif
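/* Example: on SPARC64 the low bit of the 5-bit register field selects the
   upper half of the FP register file, so an encoded value of 1 names %f32:
       DFPREG(1) == ((1 & 1) << 5) | (1 & 0x1e) == 32
   On 32-bit SPARC the odd bit is simply masked off. */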
102
103 #define UA2005_HTRAP_MASK 0xff
104 #define V8_TRAP_MASK 0x7f
105
106 static int sign_extend(int x, int len)
107 {
108     len = 32 - len;
109     return (x << len) >> len;
110 }
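/* Example: a 13-bit immediate with all bits set sign-extends to -1, i.e.
   sign_extend(0x1fff, 13) == -1; this is the path simm13 operands take via
   GET_FIELDs/GET_FIELD_SPs. */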
111
112 #define IS_IMM (insn & (1<<13))
113
114 /* floating point registers moves */
115 static void gen_op_load_fpr_DT0(unsigned int src)
116 {
117     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
118                    offsetof(CPU_DoubleU, l.upper));
119     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
120                    offsetof(CPU_DoubleU, l.lower));
121 }
122
123 static void gen_op_load_fpr_DT1(unsigned int src)
124 {
125     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
126                    offsetof(CPU_DoubleU, l.upper));
127     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
128                    offsetof(CPU_DoubleU, l.lower));
129 }
130
131 static void gen_op_store_DT0_fpr(unsigned int dst)
132 {
133     tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
134                    offsetof(CPU_DoubleU, l.upper));
135     tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
136                    offsetof(CPU_DoubleU, l.lower));
137 }
138
139 static void gen_op_load_fpr_QT0(unsigned int src)
140 {
141     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
142                    offsetof(CPU_QuadU, l.upmost));
143     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
144                    offsetof(CPU_QuadU, l.upper));
145     tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
146                    offsetof(CPU_QuadU, l.lower));
147     tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
148                    offsetof(CPU_QuadU, l.lowest));
149 }
150
151 static void gen_op_load_fpr_QT1(unsigned int src)
152 {
153     tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
154                    offsetof(CPU_QuadU, l.upmost));
155     tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
156                    offsetof(CPU_QuadU, l.upper));
157     tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
158                    offsetof(CPU_QuadU, l.lower));
159     tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
160                    offsetof(CPU_QuadU, l.lowest));
161 }
162
163 static void gen_op_store_QT0_fpr(unsigned int dst)
164 {
165     tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
166                    offsetof(CPU_QuadU, l.upmost));
167     tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
168                    offsetof(CPU_QuadU, l.upper));
169     tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
170                    offsetof(CPU_QuadU, l.lower));
171     tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
172                    offsetof(CPU_QuadU, l.lowest));
173 }
174
175 /* moves */
176 #ifdef CONFIG_USER_ONLY
177 #define supervisor(dc) 0
178 #ifdef TARGET_SPARC64
179 #define hypervisor(dc) 0
180 #endif
181 #else
182 #define supervisor(dc) (dc->mem_idx >= 1)
183 #ifdef TARGET_SPARC64
184 #define hypervisor(dc) (dc->mem_idx == 2)
185 #else
186 #endif
187 #endif
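/* mem_idx doubles as the privilege level of the code being translated:
   0 is user mode, 1 supervisor and 2 (SPARC64 only) hypervisor, which is
   what the supervisor()/hypervisor() tests above rely on. */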
188
189 #ifdef TARGET_SPARC64
190 #ifndef TARGET_ABI32
191 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
192 #else
193 #define AM_CHECK(dc) (1)
194 #endif
195 #endif
196
197 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
198 {
199 #ifdef TARGET_SPARC64
200     if (AM_CHECK(dc))
201         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
202 #endif
203 }
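/* With the 32-bit address mask in effect (PSTATE.AM, or always under a
   32-bit ABI), every generated address is truncated to its low 32 bits
   before being used for a memory access. */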
204
205 static inline void gen_movl_reg_TN(int reg, TCGv tn)
206 {
207     if (reg == 0)
208         tcg_gen_movi_tl(tn, 0);
209     else if (reg < 8)
210         tcg_gen_mov_tl(tn, cpu_gregs[reg]);
211     else {
212         tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
213     }
214 }
215
216 static inline void gen_movl_TN_reg(int reg, TCGv tn)
217 {
218     if (reg == 0)
219         return;
220     else if (reg < 8)
221         tcg_gen_mov_tl(cpu_gregs[reg], tn);
222     else {
223         tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
224     }
225 }
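/* Register file layout assumed by the two helpers above: %g0 always reads
   as zero and writes to it are discarded, %g1-%g7 live in TCG globals
   (cpu_gregs[]), and the current window's %o/%l/%i registers are reached
   indirectly through cpu_regwptr. */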
226
227 static inline void gen_goto_tb(DisasContext *s, int tb_num,
228                                target_ulong pc, target_ulong npc)
229 {
230     TranslationBlock *tb;
231
232     tb = s->tb;
233     if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
234         (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK))  {
235         /* jump to same page: we can use a direct jump */
236         tcg_gen_goto_tb(tb_num);
237         tcg_gen_movi_tl(cpu_pc, pc);
238         tcg_gen_movi_tl(cpu_npc, npc);
239         tcg_gen_exit_tb((long)tb + tb_num);
240     } else {
241         /* jump to another page: currently not optimized */
242         tcg_gen_movi_tl(cpu_pc, pc);
243         tcg_gen_movi_tl(cpu_npc, npc);
244         tcg_gen_exit_tb(0);
245     }
246 }
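/* Direct TB chaining is only safe while both pc and npc stay on the same
   page as the current translation block; tcg_gen_exit_tb((long)tb + tb_num)
   returns a tagged pointer so the jump can later be patched to chain TBs,
   whereas exit_tb(0) forces a full TB lookup for cross-page targets. */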
247
248 // XXX suboptimal
249 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
250 {
251     tcg_gen_extu_i32_tl(reg, src);
252     tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
253     tcg_gen_andi_tl(reg, reg, 0x1);
254 }
255
256 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
257 {
258     tcg_gen_extu_i32_tl(reg, src);
259     tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
260     tcg_gen_andi_tl(reg, reg, 0x1);
261 }
262
263 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
264 {
265     tcg_gen_extu_i32_tl(reg, src);
266     tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
267     tcg_gen_andi_tl(reg, reg, 0x1);
268 }
269
270 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
271 {
272     tcg_gen_extu_i32_tl(reg, src);
273     tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
274     tcg_gen_andi_tl(reg, reg, 0x1);
275 }
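/* The four helpers above share one pattern: shift the PSR so that the
   requested icc bit (N=23, Z=22, V=21, C=20 in the V8 PSR) lands in bit 0,
   then mask with 1 to leave a clean 0/1 flag value in reg. */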
276
277 static inline void gen_cc_clear_icc(void)
278 {
279     tcg_gen_movi_i32(cpu_psr, 0);
280 }
281
282 #ifdef TARGET_SPARC64
283 static inline void gen_cc_clear_xcc(void)
284 {
285     tcg_gen_movi_i32(cpu_xcc, 0);
286 }
287 #endif
288
289 /* old op:
290     if (!T0)
291         env->psr |= PSR_ZERO;
292     if ((int32_t) T0 < 0)
293         env->psr |= PSR_NEG;
294 */
295 static inline void gen_cc_NZ_icc(TCGv dst)
296 {
297     TCGv r_temp;
298     int l1, l2;
299
300     l1 = gen_new_label();
301     l2 = gen_new_label();
302     r_temp = tcg_temp_new();
303     tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
304     tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
305     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
306     gen_set_label(l1);
307     tcg_gen_ext32s_tl(r_temp, dst);
308     tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
309     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
310     gen_set_label(l2);
311     tcg_temp_free(r_temp);
312 }
313
314 #ifdef TARGET_SPARC64
315 static inline void gen_cc_NZ_xcc(TCGv dst)
316 {
317     int l1, l2;
318
319     l1 = gen_new_label();
320     l2 = gen_new_label();
321     tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
322     tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
323     gen_set_label(l1);
324     tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
325     tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
326     gen_set_label(l2);
327 }
328 #endif
329
330 /* old op:
331     if (T0 < src1)
332         env->psr |= PSR_CARRY;
333 */
334 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
335 {
336     TCGv r_temp1, r_temp2;
337     int l1;
338
339     l1 = gen_new_label();
340     r_temp1 = tcg_temp_new();
341     r_temp2 = tcg_temp_new();
342     tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
343     tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
344     tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
345     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
346     gen_set_label(l1);
347     tcg_temp_free(r_temp1);
348     tcg_temp_free(r_temp2);
349 }
350
351 #ifdef TARGET_SPARC64
352 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
353 {
354     int l1;
355
356     l1 = gen_new_label();
357     tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
358     tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
359     gen_set_label(l1);
360 }
361 #endif
362
363 /* old op:
364     if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
365         env->psr |= PSR_OVF;
366 */
367 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
368 {
369     TCGv r_temp;
370
371     r_temp = tcg_temp_new();
372     tcg_gen_xor_tl(r_temp, src1, src2);
373     tcg_gen_not_tl(r_temp, r_temp);
374     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
375     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
376     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
377     tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
378     tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
379     tcg_temp_free(r_temp);
380     tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
381 }
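/* Signed-overflow rule used above: overflow happens when both operands have
   the same sign but the result's sign differs, i.e. when bit 31 of
   (~(src1 ^ src2) & (src1 ^ dst)) is set; that bit is shifted down into the
   PSR_OVF position and ORed into the flags. */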
382
383 #ifdef TARGET_SPARC64
384 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
385 {
386     TCGv r_temp;
387
388     r_temp = tcg_temp_new();
389     tcg_gen_xor_tl(r_temp, src1, src2);
390     tcg_gen_not_tl(r_temp, r_temp);
391     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
392     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
393     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
394     tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
395     tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
396     tcg_temp_free(r_temp);
397     tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
398 }
399 #endif
400
401 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
402 {
403     TCGv r_temp;
404     TCGv_i32 r_const;
405     int l1;
406
407     l1 = gen_new_label();
408
409     r_temp = tcg_temp_new();
410     tcg_gen_xor_tl(r_temp, src1, src2);
411     tcg_gen_not_tl(r_temp, r_temp);
412     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
413     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
414     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
415     tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
416     r_const = tcg_const_i32(TT_TOVF);
417     gen_helper_raise_exception(r_const);
418     tcg_temp_free_i32(r_const);
419     gen_set_label(l1);
420     tcg_temp_free(r_temp);
421 }
422
423 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
424 {
425     int l1;
426
427     l1 = gen_new_label();
428     tcg_gen_or_tl(cpu_tmp0, src1, src2);
429     tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
430     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
431     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
432     gen_set_label(l1);
433 }
434
435 static inline void gen_op_logic_cc(TCGv dst)
436 {
437     tcg_gen_mov_tl(cpu_cc_dst, dst);
438
439     gen_cc_clear_icc();
440     gen_cc_NZ_icc(cpu_cc_dst);
441 #ifdef TARGET_SPARC64
442     gen_cc_clear_xcc();
443     gen_cc_NZ_xcc(cpu_cc_dst);
444 #endif
445 }
446
447 static inline void gen_tag_tv(TCGv src1, TCGv src2)
448 {
449     int l1;
450     TCGv_i32 r_const;
451
452     l1 = gen_new_label();
453     tcg_gen_or_tl(cpu_tmp0, src1, src2);
454     tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
455     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
456     r_const = tcg_const_i32(TT_TOVF);
457     gen_helper_raise_exception(r_const);
458     tcg_temp_free_i32(r_const);
459     gen_set_label(l1);
460 }
461
462 static inline void gen_op_add_cc2(TCGv dst)
463 {
464     gen_cc_clear_icc();
465     gen_cc_NZ_icc(cpu_cc_dst);
466     gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
467     gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
468 #ifdef TARGET_SPARC64
469     gen_cc_clear_xcc();
470     gen_cc_NZ_xcc(cpu_cc_dst);
471     gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
472     gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
473 #endif
474     tcg_gen_mov_tl(dst, cpu_cc_dst);
475 }
476
477 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
478 {
479     tcg_gen_mov_tl(cpu_cc_src, src1);
480     tcg_gen_movi_tl(cpu_cc_src2, src2);
481     tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
482     gen_op_add_cc2(dst);
483 }
484
485 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
486 {
487     tcg_gen_mov_tl(cpu_cc_src, src1);
488     tcg_gen_mov_tl(cpu_cc_src2, src2);
489     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
490     gen_op_add_cc2(dst);
491 }
492
493 static inline void gen_op_addx_cc2(TCGv dst)
494 {
495     gen_cc_NZ_icc(cpu_cc_dst);
496     gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
497     gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
498 #ifdef TARGET_SPARC64
499     gen_cc_NZ_xcc(cpu_cc_dst);
500     gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
501     gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
502 #endif
503     tcg_gen_mov_tl(dst, cpu_cc_dst);
504 }
505
506 static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
507 {
508     tcg_gen_mov_tl(cpu_cc_src, src1);
509     tcg_gen_movi_tl(cpu_cc_src2, src2);
510     gen_mov_reg_C(cpu_tmp0, cpu_psr);
511     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
512     gen_cc_clear_icc();
513     gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
514 #ifdef TARGET_SPARC64
515     gen_cc_clear_xcc();
516     gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
517 #endif
518     tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
519     gen_op_addx_cc2(dst);
520 }
521
522 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
523 {
524     tcg_gen_mov_tl(cpu_cc_src, src1);
525     tcg_gen_mov_tl(cpu_cc_src2, src2);
526     gen_mov_reg_C(cpu_tmp0, cpu_psr);
527     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
528     gen_cc_clear_icc();
529     gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
530 #ifdef TARGET_SPARC64
531     gen_cc_clear_xcc();
532     gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
533 #endif
534     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
535     gen_op_addx_cc2(dst);
536 }
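/* For ADDX/ADDXcc the current PSR carry bit must be read before
   gen_cc_clear_icc() wipes the flags; the sum is then built in two steps
   (src1 + C, then + src2) with a carry check after each step, so a carry
   produced by either partial addition ends up in PSR_CARRY. */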
537
538 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
539 {
540     tcg_gen_mov_tl(cpu_cc_src, src1);
541     tcg_gen_mov_tl(cpu_cc_src2, src2);
542     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
543     gen_cc_clear_icc();
544     gen_cc_NZ_icc(cpu_cc_dst);
545     gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
546     gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
547     gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
548 #ifdef TARGET_SPARC64
549     gen_cc_clear_xcc();
550     gen_cc_NZ_xcc(cpu_cc_dst);
551     gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
552     gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
553 #endif
554     tcg_gen_mov_tl(dst, cpu_cc_dst);
555 }
556
557 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
558 {
559     tcg_gen_mov_tl(cpu_cc_src, src1);
560     tcg_gen_mov_tl(cpu_cc_src2, src2);
561     gen_tag_tv(cpu_cc_src, cpu_cc_src2);
562     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
563     gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
564     gen_cc_clear_icc();
565     gen_cc_NZ_icc(cpu_cc_dst);
566     gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
567 #ifdef TARGET_SPARC64
568     gen_cc_clear_xcc();
569     gen_cc_NZ_xcc(cpu_cc_dst);
570     gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
571     gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
572 #endif
573     tcg_gen_mov_tl(dst, cpu_cc_dst);
574 }
575
576 /* old op:
577     if (src1 < T1)
578         env->psr |= PSR_CARRY;
579 */
580 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
581 {
582     TCGv r_temp1, r_temp2;
583     int l1;
584
585     l1 = gen_new_label();
586     r_temp1 = tcg_temp_new();
587     r_temp2 = tcg_temp_new();
588     tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
589     tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
590     tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
591     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
592     gen_set_label(l1);
593     tcg_temp_free(r_temp1);
594     tcg_temp_free(r_temp2);
595 }
596
597 #ifdef TARGET_SPARC64
598 static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
599 {
600     int l1;
601
602     l1 = gen_new_label();
603     tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
604     tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
605     gen_set_label(l1);
606 }
607 #endif
608
609 /* old op:
610     if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
611         env->psr |= PSR_OVF;
612 */
613 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
614 {
615     TCGv r_temp;
616
617     r_temp = tcg_temp_new();
618     tcg_gen_xor_tl(r_temp, src1, src2);
619     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
620     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
621     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
622     tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
623     tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
624     tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
625     tcg_temp_free(r_temp);
626 }
627
628 #ifdef TARGET_SPARC64
629 static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
630 {
631     TCGv r_temp;
632
633     r_temp = tcg_temp_new();
634     tcg_gen_xor_tl(r_temp, src1, src2);
635     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
636     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
637     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
638     tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
639     tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
640     tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
641     tcg_temp_free(r_temp);
642 }
643 #endif
644
645 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
646 {
647     TCGv r_temp;
648     TCGv_i32 r_const;
649     int l1;
650
651     l1 = gen_new_label();
652
653     r_temp = tcg_temp_new();
654     tcg_gen_xor_tl(r_temp, src1, src2);
655     tcg_gen_xor_tl(cpu_tmp0, src1, dst);
656     tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
657     tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
658     tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
659     r_const = tcg_const_i32(TT_TOVF);
660     gen_helper_raise_exception(r_const);
661     tcg_temp_free_i32(r_const);
662     gen_set_label(l1);
663     tcg_temp_free(r_temp);
664 }
665
666 static inline void gen_op_sub_cc2(TCGv dst)
667 {
668     gen_cc_clear_icc();
669     gen_cc_NZ_icc(cpu_cc_dst);
670     gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
671     gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
672 #ifdef TARGET_SPARC64
673     gen_cc_clear_xcc();
674     gen_cc_NZ_xcc(cpu_cc_dst);
675     gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
676     gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
677 #endif
678     tcg_gen_mov_tl(dst, cpu_cc_dst);
679 }
680
681 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2)
682 {
683     tcg_gen_mov_tl(cpu_cc_src, src1);
684     tcg_gen_movi_tl(cpu_cc_src2, src2);
685     if (src2 == 0) {
686         tcg_gen_mov_tl(dst, src1);
687         gen_op_logic_cc(dst);
688     } else {
689         tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
690         gen_op_sub_cc2(dst);
691     }
692 }
693
694 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
695 {
696     tcg_gen_mov_tl(cpu_cc_src, src1);
697     tcg_gen_mov_tl(cpu_cc_src2, src2);
698     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
699     gen_op_sub_cc2(dst);
700 }
701
702 static inline void gen_op_subx_cc2(TCGv dst)
703 {
704     gen_cc_NZ_icc(cpu_cc_dst);
705     gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
706     gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
707 #ifdef TARGET_SPARC64
708     gen_cc_NZ_xcc(cpu_cc_dst);
709     gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
710     gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
711 #endif
712     tcg_gen_mov_tl(dst, cpu_cc_dst);
713 }
714
715 static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
716 {
717     tcg_gen_mov_tl(cpu_cc_src, src1);
718     tcg_gen_movi_tl(cpu_cc_src2, src2);
719     gen_mov_reg_C(cpu_tmp0, cpu_psr);
720     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
721     gen_cc_clear_icc();
722     gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
723 #ifdef TARGET_SPARC64
724     gen_cc_clear_xcc();
725     gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
726 #endif
727     tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
728     gen_op_subx_cc2(dst);
729 }
730
731 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
732 {
733     tcg_gen_mov_tl(cpu_cc_src, src1);
734     tcg_gen_mov_tl(cpu_cc_src2, src2);
735     gen_mov_reg_C(cpu_tmp0, cpu_psr);
736     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
737     gen_cc_clear_icc();
738     gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
739 #ifdef TARGET_SPARC64
740     gen_cc_clear_xcc();
741     gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
742 #endif
743     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
744     gen_op_subx_cc2(dst);
745 }
746
747 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
748 {
749     tcg_gen_mov_tl(cpu_cc_src, src1);
750     tcg_gen_mov_tl(cpu_cc_src2, src2);
751     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
752     gen_cc_clear_icc();
753     gen_cc_NZ_icc(cpu_cc_dst);
754     gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
755     gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
756     gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
757 #ifdef TARGET_SPARC64
758     gen_cc_clear_xcc();
759     gen_cc_NZ_xcc(cpu_cc_dst);
760     gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
761     gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
762 #endif
763     tcg_gen_mov_tl(dst, cpu_cc_dst);
764 }
765
766 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
767 {
768     tcg_gen_mov_tl(cpu_cc_src, src1);
769     tcg_gen_mov_tl(cpu_cc_src2, src2);
770     gen_tag_tv(cpu_cc_src, cpu_cc_src2);
771     tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
772     gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
773     gen_cc_clear_icc();
774     gen_cc_NZ_icc(cpu_cc_dst);
775     gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
776 #ifdef TARGET_SPARC64
777     gen_cc_clear_xcc();
778     gen_cc_NZ_xcc(cpu_cc_dst);
779     gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
780     gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
781 #endif
782     tcg_gen_mov_tl(dst, cpu_cc_dst);
783 }
784
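/* MULScc performs one step of the V8 iterative multiply: the rs2 operand is
   zeroed when the low bit of %y is clear, %y is shifted right with the low
   bit of rs1 moving into its top bit, rs1 is shifted right with (N ^ V)
   inserted at bit 31, and the two values are finally added with full icc
   flag computation. */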
785 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
786 {
787     TCGv r_temp;
788     int l1;
789
790     l1 = gen_new_label();
791     r_temp = tcg_temp_new();
792
793     /* old op:
794     if (!(env->y & 1))
795         T1 = 0;
796     */
797     tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
798     tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
799     tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
800     tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
801     tcg_gen_movi_tl(cpu_cc_src2, 0);
802     gen_set_label(l1);
803
804     // b2 = T0 & 1;
805     // env->y = (b2 << 31) | (env->y >> 1);
806     tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
807     tcg_gen_shli_tl(r_temp, r_temp, 31);
808     tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
809     tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
810     tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
811     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
812
813     // b1 = N ^ V;
814     gen_mov_reg_N(cpu_tmp0, cpu_psr);
815     gen_mov_reg_V(r_temp, cpu_psr);
816     tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
817     tcg_temp_free(r_temp);
818
819     // T0 = (b1 << 31) | (T0 >> 1);
820     // src1 = T0;
821     tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
822     tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
823     tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
824
825     /* do addition and update flags */
826     tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
827
828     gen_cc_clear_icc();
829     gen_cc_NZ_icc(cpu_cc_dst);
830     gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
831     gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
832     tcg_gen_mov_tl(dst, cpu_cc_dst);
833 }
834
835 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
836 {
837     TCGv_i64 r_temp, r_temp2;
838
839     r_temp = tcg_temp_new_i64();
840     r_temp2 = tcg_temp_new_i64();
841
842     tcg_gen_extu_tl_i64(r_temp, src2);
843     tcg_gen_extu_tl_i64(r_temp2, src1);
844     tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
845
846     tcg_gen_shri_i64(r_temp, r_temp2, 32);
847     tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
848     tcg_temp_free_i64(r_temp);
849     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
850 #ifdef TARGET_SPARC64
851     tcg_gen_mov_i64(dst, r_temp2);
852 #else
853     tcg_gen_trunc_i64_tl(dst, r_temp2);
854 #endif
855     tcg_temp_free_i64(r_temp2);
856 }
857
858 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
859 {
860     TCGv_i64 r_temp, r_temp2;
861
862     r_temp = tcg_temp_new_i64();
863     r_temp2 = tcg_temp_new_i64();
864
865     tcg_gen_ext_tl_i64(r_temp, src2);
866     tcg_gen_ext_tl_i64(r_temp2, src1);
867     tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
868
869     tcg_gen_shri_i64(r_temp, r_temp2, 32);
870     tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
871     tcg_temp_free_i64(r_temp);
872     tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
873 #ifdef TARGET_SPARC64
874     tcg_gen_mov_i64(dst, r_temp2);
875 #else
876     tcg_gen_trunc_i64_tl(dst, r_temp2);
877 #endif
878     tcg_temp_free_i64(r_temp2);
879 }
880
881 #ifdef TARGET_SPARC64
882 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
883 {
884     TCGv_i32 r_const;
885     int l1;
886
887     l1 = gen_new_label();
888     tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
889     r_const = tcg_const_i32(TT_DIV_ZERO);
890     gen_helper_raise_exception(r_const);
891     tcg_temp_free_i32(r_const);
892     gen_set_label(l1);
893 }
894
895 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
896 {
897     int l1, l2;
898
899     l1 = gen_new_label();
900     l2 = gen_new_label();
901     tcg_gen_mov_tl(cpu_cc_src, src1);
902     tcg_gen_mov_tl(cpu_cc_src2, src2);
903     gen_trap_ifdivzero_tl(cpu_cc_src2);
904     tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
905     tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
906     tcg_gen_movi_i64(dst, INT64_MIN);
907     tcg_gen_br(l2);
908     gen_set_label(l1);
909     tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
910     gen_set_label(l2);
911 }
912 #endif
913
914 static inline void gen_op_div_cc(TCGv dst)
915 {
916     int l1;
917
918     tcg_gen_mov_tl(cpu_cc_dst, dst);
919     gen_cc_clear_icc();
920     gen_cc_NZ_icc(cpu_cc_dst);
921     l1 = gen_new_label();
922     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
923     tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
924     gen_set_label(l1);
925 }
926
927 // 1
928 static inline void gen_op_eval_ba(TCGv dst)
929 {
930     tcg_gen_movi_tl(dst, 1);
931 }
932
933 // Z
934 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
935 {
936     gen_mov_reg_Z(dst, src);
937 }
938
939 // Z | (N ^ V)
940 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
941 {
942     gen_mov_reg_N(cpu_tmp0, src);
943     gen_mov_reg_V(dst, src);
944     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
945     gen_mov_reg_Z(cpu_tmp0, src);
946     tcg_gen_or_tl(dst, dst, cpu_tmp0);
947 }
948
949 // N ^ V
950 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
951 {
952     gen_mov_reg_V(cpu_tmp0, src);
953     gen_mov_reg_N(dst, src);
954     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
955 }
956
957 // C | Z
958 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
959 {
960     gen_mov_reg_Z(cpu_tmp0, src);
961     gen_mov_reg_C(dst, src);
962     tcg_gen_or_tl(dst, dst, cpu_tmp0);
963 }
964
965 // C
966 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
967 {
968     gen_mov_reg_C(dst, src);
969 }
970
971 // V
972 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
973 {
974     gen_mov_reg_V(dst, src);
975 }
976
977 // 0
978 static inline void gen_op_eval_bn(TCGv dst)
979 {
980     tcg_gen_movi_tl(dst, 0);
981 }
982
983 // N
984 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
985 {
986     gen_mov_reg_N(dst, src);
987 }
988
989 // !Z
990 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
991 {
992     gen_mov_reg_Z(dst, src);
993     tcg_gen_xori_tl(dst, dst, 0x1);
994 }
995
996 // !(Z | (N ^ V))
997 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
998 {
999     gen_mov_reg_N(cpu_tmp0, src);
1000     gen_mov_reg_V(dst, src);
1001     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1002     gen_mov_reg_Z(cpu_tmp0, src);
1003     tcg_gen_or_tl(dst, dst, cpu_tmp0);
1004     tcg_gen_xori_tl(dst, dst, 0x1);
1005 }
1006
1007 // !(N ^ V)
1008 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
1009 {
1010     gen_mov_reg_V(cpu_tmp0, src);
1011     gen_mov_reg_N(dst, src);
1012     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1013     tcg_gen_xori_tl(dst, dst, 0x1);
1014 }
1015
1016 // !(C | Z)
1017 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
1018 {
1019     gen_mov_reg_Z(cpu_tmp0, src);
1020     gen_mov_reg_C(dst, src);
1021     tcg_gen_or_tl(dst, dst, cpu_tmp0);
1022     tcg_gen_xori_tl(dst, dst, 0x1);
1023 }
1024
1025 // !C
1026 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
1027 {
1028     gen_mov_reg_C(dst, src);
1029     tcg_gen_xori_tl(dst, dst, 0x1);
1030 }
1031
1032 // !N
1033 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
1034 {
1035     gen_mov_reg_N(dst, src);
1036     tcg_gen_xori_tl(dst, dst, 0x1);
1037 }
1038
1039 // !V
1040 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
1041 {
1042     gen_mov_reg_V(dst, src);
1043     tcg_gen_xori_tl(dst, dst, 0x1);
1044 }
1045
1046 /*
1047   FSR bit field FCC1 | FCC0 (result of a floating-point compare):
1048    0 =
1049    1 <
1050    2 >
1051    3 unordered
1052 */
1053 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
1054                                     unsigned int fcc_offset)
1055 {
1056     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
1057     tcg_gen_andi_tl(reg, reg, 0x1);
1058 }
1059
1060 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
1061                                     unsigned int fcc_offset)
1062 {
1063     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
1064     tcg_gen_andi_tl(reg, reg, 0x1);
1065 }
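/* fcc_offset selects which FSR condition-code field is read: 0 for fcc0
   (bits 11:10 on V8), while the offsets chosen in gen_fcond() below
   (32-10, 34-10, 36-10) land on the SPARC64 fields fcc1/fcc2/fcc3 at FSR
   bits 33:32, 35:34 and 37:36. */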
1066
1067 // !0: FCC0 | FCC1
1068 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
1069                                     unsigned int fcc_offset)
1070 {
1071     gen_mov_reg_FCC0(dst, src, fcc_offset);
1072     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1073     tcg_gen_or_tl(dst, dst, cpu_tmp0);
1074 }
1075
1076 // 1 or 2: FCC0 ^ FCC1
1077 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
1078                                     unsigned int fcc_offset)
1079 {
1080     gen_mov_reg_FCC0(dst, src, fcc_offset);
1081     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1082     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1083 }
1084
1085 // 1 or 3: FCC0
1086 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1087                                     unsigned int fcc_offset)
1088 {
1089     gen_mov_reg_FCC0(dst, src, fcc_offset);
1090 }
1091
1092 // 1: FCC0 & !FCC1
1093 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1094                                     unsigned int fcc_offset)
1095 {
1096     gen_mov_reg_FCC0(dst, src, fcc_offset);
1097     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1098     tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1099     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1100 }
1101
1102 // 2 or 3: FCC1
1103 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1104                                     unsigned int fcc_offset)
1105 {
1106     gen_mov_reg_FCC1(dst, src, fcc_offset);
1107 }
1108
1109 // 2: !FCC0 & FCC1
1110 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1111                                     unsigned int fcc_offset)
1112 {
1113     gen_mov_reg_FCC0(dst, src, fcc_offset);
1114     tcg_gen_xori_tl(dst, dst, 0x1);
1115     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1116     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1117 }
1118
1119 // 3: FCC0 & FCC1
1120 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1121                                     unsigned int fcc_offset)
1122 {
1123     gen_mov_reg_FCC0(dst, src, fcc_offset);
1124     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1125     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1126 }
1127
1128 // 0: !(FCC0 | FCC1)
1129 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1130                                     unsigned int fcc_offset)
1131 {
1132     gen_mov_reg_FCC0(dst, src, fcc_offset);
1133     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1134     tcg_gen_or_tl(dst, dst, cpu_tmp0);
1135     tcg_gen_xori_tl(dst, dst, 0x1);
1136 }
1137
1138 // 0 or 3: !(FCC0 ^ FCC1)
1139 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1140                                     unsigned int fcc_offset)
1141 {
1142     gen_mov_reg_FCC0(dst, src, fcc_offset);
1143     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1144     tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1145     tcg_gen_xori_tl(dst, dst, 0x1);
1146 }
1147
1148 // 0 or 2: !FCC0
1149 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1150                                     unsigned int fcc_offset)
1151 {
1152     gen_mov_reg_FCC0(dst, src, fcc_offset);
1153     tcg_gen_xori_tl(dst, dst, 0x1);
1154 }
1155
1156 // !1: !(FCC0 & !FCC1)
1157 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1158                                     unsigned int fcc_offset)
1159 {
1160     gen_mov_reg_FCC0(dst, src, fcc_offset);
1161     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1162     tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1163     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1164     tcg_gen_xori_tl(dst, dst, 0x1);
1165 }
1166
1167 // 0 or 1: !FCC1
1168 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1169                                     unsigned int fcc_offset)
1170 {
1171     gen_mov_reg_FCC1(dst, src, fcc_offset);
1172     tcg_gen_xori_tl(dst, dst, 0x1);
1173 }
1174
1175 // !2: !(!FCC0 & FCC1)
1176 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1177                                     unsigned int fcc_offset)
1178 {
1179     gen_mov_reg_FCC0(dst, src, fcc_offset);
1180     tcg_gen_xori_tl(dst, dst, 0x1);
1181     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1182     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1183     tcg_gen_xori_tl(dst, dst, 0x1);
1184 }
1185
1186 // !3: !(FCC0 & FCC1)
1187 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1188                                     unsigned int fcc_offset)
1189 {
1190     gen_mov_reg_FCC0(dst, src, fcc_offset);
1191     gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1192     tcg_gen_and_tl(dst, dst, cpu_tmp0);
1193     tcg_gen_xori_tl(dst, dst, 0x1);
1194 }
1195
1196 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1197                                target_ulong pc2, TCGv r_cond)
1198 {
1199     int l1;
1200
1201     l1 = gen_new_label();
1202
1203     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1204
1205     gen_goto_tb(dc, 0, pc1, pc1 + 4);
1206
1207     gen_set_label(l1);
1208     gen_goto_tb(dc, 1, pc2, pc2 + 4);
1209 }
1210
1211 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1212                                 target_ulong pc2, TCGv r_cond)
1213 {
1214     int l1;
1215
1216     l1 = gen_new_label();
1217
1218     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1219
1220     gen_goto_tb(dc, 0, pc2, pc1);
1221
1222     gen_set_label(l1);
1223     gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1224 }
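/* gen_branch_a() handles a conditional branch with the annul bit set:
   pc1 is the branch target and pc2 the delay-slot address.  If the
   condition holds, execution continues at the delay slot with npc pointing
   at the target; if it fails, the delay slot is annulled and control skips
   straight to pc2 + 4. */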
1225
1226 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1227                                       TCGv r_cond)
1228 {
1229     int l1, l2;
1230
1231     l1 = gen_new_label();
1232     l2 = gen_new_label();
1233
1234     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1235
1236     tcg_gen_movi_tl(cpu_npc, npc1);
1237     tcg_gen_br(l2);
1238
1239     gen_set_label(l1);
1240     tcg_gen_movi_tl(cpu_npc, npc2);
1241     gen_set_label(l2);
1242 }
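/* While npc == JUMP_PC the next program counter is not known at translation
   time: it is one of dc->jump_pc[0]/[1], depending on the branch condition
   still held in cpu_cond.  gen_generic_branch() resolves that choice into
   cpu_npc, after which npc is treated as DYNAMIC_PC (see flush_cond() and
   save_npc() below). */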
1243
1244 /* call this function before using the condition register as it may
1245    have been set for a jump */
1246 static inline void flush_cond(DisasContext *dc, TCGv cond)
1247 {
1248     if (dc->npc == JUMP_PC) {
1249         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1250         dc->npc = DYNAMIC_PC;
1251     }
1252 }
1253
1254 static inline void save_npc(DisasContext *dc, TCGv cond)
1255 {
1256     if (dc->npc == JUMP_PC) {
1257         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1258         dc->npc = DYNAMIC_PC;
1259     } else if (dc->npc != DYNAMIC_PC) {
1260         tcg_gen_movi_tl(cpu_npc, dc->npc);
1261     }
1262 }
1263
1264 static inline void save_state(DisasContext *dc, TCGv cond)
1265 {
1266     tcg_gen_movi_tl(cpu_pc, dc->pc);
1267     save_npc(dc, cond);
1268 }
1269
1270 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1271 {
1272     if (dc->npc == JUMP_PC) {
1273         gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1274         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1275         dc->pc = DYNAMIC_PC;
1276     } else if (dc->npc == DYNAMIC_PC) {
1277         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1278         dc->pc = DYNAMIC_PC;
1279     } else {
1280         dc->pc = dc->npc;
1281     }
1282 }
1283
1284 static inline void gen_op_next_insn(void)
1285 {
1286     tcg_gen_mov_tl(cpu_pc, cpu_npc);
1287     tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1288 }
1289
1290 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1291                             DisasContext *dc)
1292 {
1293     TCGv_i32 r_src;
1294
1295 #ifdef TARGET_SPARC64
1296     if (cc)
1297         r_src = cpu_xcc;
1298     else
1299         r_src = cpu_psr;
1300 #else
1301     r_src = cpu_psr;
1302 #endif
1303     switch (dc->cc_op) {
1304     case CC_OP_FLAGS:
1305         break;
1306     default:
1307         gen_helper_compute_psr();
1308         dc->cc_op = CC_OP_FLAGS;
1309         break;
1310     }
1311     switch (cond) {
1312     case 0x0:
1313         gen_op_eval_bn(r_dst);
1314         break;
1315     case 0x1:
1316         gen_op_eval_be(r_dst, r_src);
1317         break;
1318     case 0x2:
1319         gen_op_eval_ble(r_dst, r_src);
1320         break;
1321     case 0x3:
1322         gen_op_eval_bl(r_dst, r_src);
1323         break;
1324     case 0x4:
1325         gen_op_eval_bleu(r_dst, r_src);
1326         break;
1327     case 0x5:
1328         gen_op_eval_bcs(r_dst, r_src);
1329         break;
1330     case 0x6:
1331         gen_op_eval_bneg(r_dst, r_src);
1332         break;
1333     case 0x7:
1334         gen_op_eval_bvs(r_dst, r_src);
1335         break;
1336     case 0x8:
1337         gen_op_eval_ba(r_dst);
1338         break;
1339     case 0x9:
1340         gen_op_eval_bne(r_dst, r_src);
1341         break;
1342     case 0xa:
1343         gen_op_eval_bg(r_dst, r_src);
1344         break;
1345     case 0xb:
1346         gen_op_eval_bge(r_dst, r_src);
1347         break;
1348     case 0xc:
1349         gen_op_eval_bgu(r_dst, r_src);
1350         break;
1351     case 0xd:
1352         gen_op_eval_bcc(r_dst, r_src);
1353         break;
1354     case 0xe:
1355         gen_op_eval_bpos(r_dst, r_src);
1356         break;
1357     case 0xf:
1358         gen_op_eval_bvc(r_dst, r_src);
1359         break;
1360     }
1361 }
1362
1363 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1364 {
1365     unsigned int offset;
1366
1367     switch (cc) {
1368     default:
1369     case 0x0:
1370         offset = 0;
1371         break;
1372     case 0x1:
1373         offset = 32 - 10;
1374         break;
1375     case 0x2:
1376         offset = 34 - 10;
1377         break;
1378     case 0x3:
1379         offset = 36 - 10;
1380         break;
1381     }
1382
1383     switch (cond) {
1384     case 0x0:
1385         gen_op_eval_bn(r_dst);
1386         break;
1387     case 0x1:
1388         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1389         break;
1390     case 0x2:
1391         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1392         break;
1393     case 0x3:
1394         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1395         break;
1396     case 0x4:
1397         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1398         break;
1399     case 0x5:
1400         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1401         break;
1402     case 0x6:
1403         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1404         break;
1405     case 0x7:
1406         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1407         break;
1408     case 0x8:
1409         gen_op_eval_ba(r_dst);
1410         break;
1411     case 0x9:
1412         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1413         break;
1414     case 0xa:
1415         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1416         break;
1417     case 0xb:
1418         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1419         break;
1420     case 0xc:
1421         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1422         break;
1423     case 0xd:
1424         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1425         break;
1426     case 0xe:
1427         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1428         break;
1429     case 0xf:
1430         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1431         break;
1432     }
1433 }
1434
1435 #ifdef TARGET_SPARC64
1436 // Inverted logic: the table holds the negation of each rcond test, used to branch around setting the result
1437 static const int gen_tcg_cond_reg[8] = {
1438     -1,
1439     TCG_COND_NE,
1440     TCG_COND_GT,
1441     TCG_COND_GE,
1442     -1,
1443     TCG_COND_EQ,
1444     TCG_COND_LE,
1445     TCG_COND_LT,
1446 };
1447
1448 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1449 {
1450     int l1;
1451
1452     l1 = gen_new_label();
1453     tcg_gen_movi_tl(r_dst, 0);
1454     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1455     tcg_gen_movi_tl(r_dst, 1);
1456     gen_set_label(l1);
1457 }
1458 #endif
1459
1460 /* XXX: potentially incorrect if dynamic npc */
1461 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1462                       TCGv r_cond)
1463 {
1464     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1465     target_ulong target = dc->pc + offset;
1466
1467     if (cond == 0x0) {
1468         /* unconditional not taken */
1469         if (a) {
1470             dc->pc = dc->npc + 4;
1471             dc->npc = dc->pc + 4;
1472         } else {
1473             dc->pc = dc->npc;
1474             dc->npc = dc->pc + 4;
1475         }
1476     } else if (cond == 0x8) {
1477         /* unconditional taken */
1478         if (a) {
1479             dc->pc = target;
1480             dc->npc = dc->pc + 4;
1481         } else {
1482             dc->pc = dc->npc;
1483             dc->npc = target;
1484         }
1485     } else {
1486         flush_cond(dc, r_cond);
1487         gen_cond(r_cond, cc, cond, dc);
1488         if (a) {
1489             gen_branch_a(dc, target, dc->npc, r_cond);
1490             dc->is_br = 1;
1491         } else {
1492             dc->pc = dc->npc;
1493             dc->jump_pc[0] = target;
1494             dc->jump_pc[1] = dc->npc + 4;
1495             dc->npc = JUMP_PC;
1496         }
1497     }
1498 }
1499
1500 /* XXX: potentially incorrect if dynamic npc */
1501 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1502                       TCGv r_cond)
1503 {
1504     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1505     target_ulong target = dc->pc + offset;
1506
1507     if (cond == 0x0) {
1508         /* unconditional not taken */
1509         if (a) {
1510             dc->pc = dc->npc + 4;
1511             dc->npc = dc->pc + 4;
1512         } else {
1513             dc->pc = dc->npc;
1514             dc->npc = dc->pc + 4;
1515         }
1516     } else if (cond == 0x8) {
1517         /* unconditional taken */
1518         if (a) {
1519             dc->pc = target;
1520             dc->npc = dc->pc + 4;
1521         } else {
1522             dc->pc = dc->npc;
1523             dc->npc = target;
1524         }
1525     } else {
1526         flush_cond(dc, r_cond);
1527         gen_fcond(r_cond, cc, cond);
1528         if (a) {
1529             gen_branch_a(dc, target, dc->npc, r_cond);
1530             dc->is_br = 1;
1531         } else {
1532             dc->pc = dc->npc;
1533             dc->jump_pc[0] = target;
1534             dc->jump_pc[1] = dc->npc + 4;
1535             dc->npc = JUMP_PC;
1536         }
1537     }
1538 }
1539
1540 #ifdef TARGET_SPARC64
1541 /* XXX: potentially incorrect if dynamic npc */
1542 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1543                           TCGv r_cond, TCGv r_reg)
1544 {
1545     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1546     target_ulong target = dc->pc + offset;
1547
1548     flush_cond(dc, r_cond);
1549     gen_cond_reg(r_cond, cond, r_reg);
1550     if (a) {
1551         gen_branch_a(dc, target, dc->npc, r_cond);
1552         dc->is_br = 1;
1553     } else {
1554         dc->pc = dc->npc;
1555         dc->jump_pc[0] = target;
1556         dc->jump_pc[1] = dc->npc + 4;
1557         dc->npc = JUMP_PC;
1558     }
1559 }
1560
1561 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1562 {
1563     switch (fccno) {
1564     case 0:
1565         gen_helper_fcmps(r_rs1, r_rs2);
1566         break;
1567     case 1:
1568         gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1569         break;
1570     case 2:
1571         gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1572         break;
1573     case 3:
1574         gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1575         break;
1576     }
1577 }
1578
1579 static inline void gen_op_fcmpd(int fccno)
1580 {
1581     switch (fccno) {
1582     case 0:
1583         gen_helper_fcmpd();
1584         break;
1585     case 1:
1586         gen_helper_fcmpd_fcc1();
1587         break;
1588     case 2:
1589         gen_helper_fcmpd_fcc2();
1590         break;
1591     case 3:
1592         gen_helper_fcmpd_fcc3();
1593         break;
1594     }
1595 }
1596
1597 static inline void gen_op_fcmpq(int fccno)
1598 {
1599     switch (fccno) {
1600     case 0:
1601         gen_helper_fcmpq();
1602         break;
1603     case 1:
1604         gen_helper_fcmpq_fcc1();
1605         break;
1606     case 2:
1607         gen_helper_fcmpq_fcc2();
1608         break;
1609     case 3:
1610         gen_helper_fcmpq_fcc3();
1611         break;
1612     }
1613 }
1614
1615 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1616 {
1617     switch (fccno) {
1618     case 0:
1619         gen_helper_fcmpes(r_rs1, r_rs2);
1620         break;
1621     case 1:
1622         gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1623         break;
1624     case 2:
1625         gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1626         break;
1627     case 3:
1628         gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1629         break;
1630     }
1631 }
1632
1633 static inline void gen_op_fcmped(int fccno)
1634 {
1635     switch (fccno) {
1636     case 0:
1637         gen_helper_fcmped();
1638         break;
1639     case 1:
1640         gen_helper_fcmped_fcc1();
1641         break;
1642     case 2:
1643         gen_helper_fcmped_fcc2();
1644         break;
1645     case 3:
1646         gen_helper_fcmped_fcc3();
1647         break;
1648     }
1649 }
1650
1651 static inline void gen_op_fcmpeq(int fccno)
1652 {
1653     switch (fccno) {
1654     case 0:
1655         gen_helper_fcmpeq();
1656         break;
1657     case 1:
1658         gen_helper_fcmpeq_fcc1();
1659         break;
1660     case 2:
1661         gen_helper_fcmpeq_fcc2();
1662         break;
1663     case 3:
1664         gen_helper_fcmpeq_fcc3();
1665         break;
1666     }
1667 }
1668
1669 #else
1670
1671 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1672 {
1673     gen_helper_fcmps(r_rs1, r_rs2);
1674 }
1675
1676 static inline void gen_op_fcmpd(int fccno)
1677 {
1678     gen_helper_fcmpd();
1679 }
1680
1681 static inline void gen_op_fcmpq(int fccno)
1682 {
1683     gen_helper_fcmpq();
1684 }
1685
1686 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1687 {
1688     gen_helper_fcmpes(r_rs1, r_rs2);
1689 }
1690
1691 static inline void gen_op_fcmped(int fccno)
1692 {
1693     gen_helper_fcmped();
1694 }
1695
1696 static inline void gen_op_fcmpeq(int fccno)
1697 {
1698     gen_helper_fcmpeq();
1699 }
1700 #endif
1701
1702 static inline void gen_op_fpexception_im(int fsr_flags)
1703 {
1704     TCGv_i32 r_const;
1705
1706     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1707     tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1708     r_const = tcg_const_i32(TT_FP_EXCP);
1709     gen_helper_raise_exception(r_const);
1710     tcg_temp_free_i32(r_const);
1711 }
1712
1713 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1714 {
1715 #if !defined(CONFIG_USER_ONLY)
1716     if (!dc->fpu_enabled) {
1717         TCGv_i32 r_const;
1718
1719         save_state(dc, r_cond);
1720         r_const = tcg_const_i32(TT_NFPU_INSN);
1721         gen_helper_raise_exception(r_const);
1722         tcg_temp_free_i32(r_const);
1723         dc->is_br = 1;
1724         return 1;
1725     }
1726 #endif
1727     return 0;
1728 }
1729
1730 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1731 {
1732     tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1733 }
1734
1735 static inline void gen_clear_float_exceptions(void)
1736 {
1737     gen_helper_clear_float_exceptions();
1738 }
1739
1740 /* asi moves */
1741 #ifdef TARGET_SPARC64
1742 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1743 {
1744     int asi;
1745     TCGv_i32 r_asi;
1746
1747     if (IS_IMM) {
1748         r_asi = tcg_temp_new_i32();
1749         tcg_gen_mov_i32(r_asi, cpu_asi);
1750     } else {
1751         asi = GET_FIELD(insn, 19, 26);
1752         r_asi = tcg_const_i32(asi);
1753     }
1754     return r_asi;
1755 }
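/* ASI selection on SPARC64: with the immediate form (i bit set) the ASI is
   taken from the %asi register at run time, otherwise the 8-bit asi field
   of the instruction (bits 12..5, i.e. GET_FIELD(insn, 19, 26)) is used as
   a translation-time constant. */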
1756
1757 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1758                               int sign)
1759 {
1760     TCGv_i32 r_asi, r_size, r_sign;
1761
1762     r_asi = gen_get_asi(insn, addr);
1763     r_size = tcg_const_i32(size);
1764     r_sign = tcg_const_i32(sign);
1765     gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1766     tcg_temp_free_i32(r_sign);
1767     tcg_temp_free_i32(r_size);
1768     tcg_temp_free_i32(r_asi);
1769 }
1770
1771 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1772 {
1773     TCGv_i32 r_asi, r_size;
1774
1775     r_asi = gen_get_asi(insn, addr);
1776     r_size = tcg_const_i32(size);
1777     gen_helper_st_asi(addr, src, r_asi, r_size);
1778     tcg_temp_free_i32(r_size);
1779     tcg_temp_free_i32(r_asi);
1780 }
1781
1782 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1783 {
1784     TCGv_i32 r_asi, r_size, r_rd;
1785
1786     r_asi = gen_get_asi(insn, addr);
1787     r_size = tcg_const_i32(size);
1788     r_rd = tcg_const_i32(rd);
1789     gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1790     tcg_temp_free_i32(r_rd);
1791     tcg_temp_free_i32(r_size);
1792     tcg_temp_free_i32(r_asi);
1793 }
1794
1795 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1796 {
1797     TCGv_i32 r_asi, r_size, r_rd;
1798
1799     r_asi = gen_get_asi(insn, addr);
1800     r_size = tcg_const_i32(size);
1801     r_rd = tcg_const_i32(rd);
1802     gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1803     tcg_temp_free_i32(r_rd);
1804     tcg_temp_free_i32(r_size);
1805     tcg_temp_free_i32(r_asi);
1806 }
1807
1808 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1809 {
1810     TCGv_i32 r_asi, r_size, r_sign;
1811
1812     r_asi = gen_get_asi(insn, addr);
1813     r_size = tcg_const_i32(4);
1814     r_sign = tcg_const_i32(0);
1815     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1816     tcg_temp_free_i32(r_sign);
1817     gen_helper_st_asi(addr, dst, r_asi, r_size);
1818     tcg_temp_free_i32(r_size);
1819     tcg_temp_free_i32(r_asi);
1820     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1821 }
1822
1823 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1824 {
1825     TCGv_i32 r_asi, r_rd;
1826
1827     r_asi = gen_get_asi(insn, addr);
1828     r_rd = tcg_const_i32(rd);
1829     gen_helper_ldda_asi(addr, r_asi, r_rd);
1830     tcg_temp_free_i32(r_rd);
1831     tcg_temp_free_i32(r_asi);
1832 }
1833
1834 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1835 {
1836     TCGv_i32 r_asi, r_size;
1837
1838     gen_movl_reg_TN(rd + 1, cpu_tmp0);
1839     tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1840     r_asi = gen_get_asi(insn, addr);
1841     r_size = tcg_const_i32(8);
1842     gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1843     tcg_temp_free_i32(r_size);
1844     tcg_temp_free_i32(r_asi);
1845 }
1846
1847 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1848                                int rd)
1849 {
1850     TCGv r_val1;
1851     TCGv_i32 r_asi;
1852
1853     r_val1 = tcg_temp_new();
1854     gen_movl_reg_TN(rd, r_val1);
1855     r_asi = gen_get_asi(insn, addr);
1856     gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1857     tcg_temp_free_i32(r_asi);
1858     tcg_temp_free(r_val1);
1859 }
1860
1861 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1862                                 int rd)
1863 {
1864     TCGv_i32 r_asi;
1865
1866     gen_movl_reg_TN(rd, cpu_tmp64);
1867     r_asi = gen_get_asi(insn, addr);
1868     gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1869     tcg_temp_free_i32(r_asi);
1870 }
1871
1872 #elif !defined(CONFIG_USER_ONLY)
1873
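     /* Non-V9 system-mode versions: sparc32 has no %asi register, so
        the ASI always comes from the immediate field of the instruction
        (bits 12:5, i.e. GET_FIELD(insn, 19, 26)). */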
1874 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1875                               int sign)
1876 {
1877     TCGv_i32 r_asi, r_size, r_sign;
1878
1879     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1880     r_size = tcg_const_i32(size);
1881     r_sign = tcg_const_i32(sign);
1882     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1883     tcg_temp_free(r_sign);
1884     tcg_temp_free(r_size);
1885     tcg_temp_free(r_asi);
1886     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1887 }
1888
1889 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1890 {
1891     TCGv_i32 r_asi, r_size;
1892
1893     tcg_gen_extu_tl_i64(cpu_tmp64, src);
1894     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1895     r_size = tcg_const_i32(size);
1896     gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1897     tcg_temp_free(r_size);
1898     tcg_temp_free(r_asi);
1899 }
1900
1901 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1902 {
1903     TCGv_i32 r_asi, r_size, r_sign;
1904     TCGv_i64 r_val;
1905
1906     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1907     r_size = tcg_const_i32(4);
1908     r_sign = tcg_const_i32(0);
1909     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1910     tcg_temp_free(r_sign);
1911     r_val = tcg_temp_new_i64();
1912     tcg_gen_extu_tl_i64(r_val, dst);
1913     gen_helper_st_asi(addr, r_val, r_asi, r_size);
1914     tcg_temp_free_i64(r_val);
1915     tcg_temp_free(r_size);
1916     tcg_temp_free(r_asi);
1917     tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1918 }
1919
1920 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1921 {
1922     TCGv_i32 r_asi, r_size, r_sign;
1923
1924     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1925     r_size = tcg_const_i32(8);
1926     r_sign = tcg_const_i32(0);
1927     gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1928     tcg_temp_free(r_sign);
1929     tcg_temp_free(r_size);
1930     tcg_temp_free(r_asi);
1931     tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1932     gen_movl_TN_reg(rd + 1, cpu_tmp0);
1933     tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1934     tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1935     gen_movl_TN_reg(rd, hi);
1936 }
1937
1938 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1939 {
1940     TCGv_i32 r_asi, r_size;
1941
1942     gen_movl_reg_TN(rd + 1, cpu_tmp0);
1943     tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1944     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1945     r_size = tcg_const_i32(8);
1946     gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1947     tcg_temp_free(r_size);
1948     tcg_temp_free(r_asi);
1949 }
1950 #endif
1951
1952 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
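     /* LDSTUB(A): load the addressed byte into dst, then store 0xff to
        the same location (the SPARC test-and-set primitive). */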
1953 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1954 {
1955     TCGv_i64 r_val;
1956     TCGv_i32 r_asi, r_size;
1957
1958     gen_ld_asi(dst, addr, insn, 1, 0);
1959
1960     r_val = tcg_const_i64(0xffULL);
1961     r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1962     r_size = tcg_const_i32(1);
1963     gen_helper_st_asi(addr, r_val, r_asi, r_size);
1964     tcg_temp_free_i32(r_size);
1965     tcg_temp_free_i32(r_asi);
1966     tcg_temp_free_i64(r_val);
1967 }
1968 #endif
1969
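     /* Operand fetch for rs1/rs2: %g0 reads as constant zero, the other
        globals come from cpu_gregs[], and windowed registers (>= 8) are
        loaded through cpu_regwptr into the caller-provided temporary. */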
1970 static inline TCGv get_src1(unsigned int insn, TCGv def)
1971 {
1972     TCGv r_rs1 = def;
1973     unsigned int rs1;
1974
1975     rs1 = GET_FIELD(insn, 13, 17);
1976     if (rs1 == 0)
1977         r_rs1 = tcg_const_tl(0); // XXX how to free?
1978     else if (rs1 < 8)
1979         r_rs1 = cpu_gregs[rs1];
1980     else
1981         tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1982     return r_rs1;
1983 }
1984
1985 static inline TCGv get_src2(unsigned int insn, TCGv def)
1986 {
1987     TCGv r_rs2 = def;
1988
1989     if (IS_IMM) { /* immediate */
1990         target_long simm;
1991
1992         simm = GET_FIELDs(insn, 19, 31);
1993         r_rs2 = tcg_const_tl(simm); // XXX how to free?
1994     } else { /* register */
1995         unsigned int rs2;
1996
1997         rs2 = GET_FIELD(insn, 27, 31);
1998         if (rs2 == 0)
1999             r_rs2 = tcg_const_tl(0); // XXX how to free?
2000         else if (rs2 < 8)
2001             r_rs2 = cpu_gregs[rs2];
2002         else
2003             tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
2004     }
2005     return r_rs2;
2006 }
2007
2008 #define CHECK_IU_FEATURE(dc, FEATURE)                      \
2009     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2010         goto illegal_insn;
2011 #define CHECK_FPU_FEATURE(dc, FEATURE)                     \
2012     if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
2013         goto nfpu_insn;
2014
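     /* Top-level decode: op = insn<31:30> selects the instruction format
        (0: branches/sethi, 1: CALL, 2: arithmetic/FPU/misc, 3: loads and
        stores). */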
2015 /* before an instruction, dc->pc must be static */
2016 static void disas_sparc_insn(DisasContext * dc)
2017 {
2018     unsigned int insn, opc, rs1, rs2, rd;
2019     target_long simm;
2020
2021     if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
2022         tcg_gen_debug_insn_start(dc->pc);
2023     insn = ldl_code(dc->pc);
2024     opc = GET_FIELD(insn, 0, 1);
2025
2026     rd = GET_FIELD(insn, 2, 6);
2027
2028     cpu_src1 = tcg_temp_new(); // const
2029     cpu_src2 = tcg_temp_new(); // const
2030
2031     switch (opc) {
2032     case 0:                     /* branches/sethi */
2033         {
2034             unsigned int xop = GET_FIELD(insn, 7, 9);
2035             int32_t target;
2036             switch (xop) {
2037 #ifdef TARGET_SPARC64
2038             case 0x1:           /* V9 BPcc */
2039                 {
2040                     int cc;
2041
2042                     target = GET_FIELD_SP(insn, 0, 18);
2043                     target = sign_extend(target, 19);
2044                     target <<= 2;
2045                     cc = GET_FIELD_SP(insn, 20, 21);
2046                     if (cc == 0)
2047                         do_branch(dc, target, insn, 0, cpu_cond);
2048                     else if (cc == 2)
2049                         do_branch(dc, target, insn, 1, cpu_cond);
2050                     else
2051                         goto illegal_insn;
2052                     goto jmp_insn;
2053                 }
2054             case 0x3:           /* V9 BPr */
2055                 {
2056                     target = GET_FIELD_SP(insn, 0, 13) |
2057                         (GET_FIELD_SP(insn, 20, 21) << 14);
2058                     target = sign_extend(target, 16);
2059                     target <<= 2;
2060                     cpu_src1 = get_src1(insn, cpu_src1);
2061                     do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
2062                     goto jmp_insn;
2063                 }
2064             case 0x5:           /* V9 FBPcc */
2065                 {
2066                     int cc = GET_FIELD_SP(insn, 20, 21);
2067                     if (gen_trap_ifnofpu(dc, cpu_cond))
2068                         goto jmp_insn;
2069                     target = GET_FIELD_SP(insn, 0, 18);
2070                     target = sign_extend(target, 19);
2071                     target <<= 2;
2072                     do_fbranch(dc, target, insn, cc, cpu_cond);
2073                     goto jmp_insn;
2074                 }
2075 #else
2076             case 0x7:           /* CBN+x */
2077                 {
2078                     goto ncp_insn;
2079                 }
2080 #endif
2081             case 0x2:           /* BN+x */
2082                 {
2083                     target = GET_FIELD(insn, 10, 31);
2084                     target = sign_extend(target, 22);
2085                     target <<= 2;
2086                     do_branch(dc, target, insn, 0, cpu_cond);
2087                     goto jmp_insn;
2088                 }
2089             case 0x6:           /* FBN+x */
2090                 {
2091                     if (gen_trap_ifnofpu(dc, cpu_cond))
2092                         goto jmp_insn;
2093                     target = GET_FIELD(insn, 10, 31);
2094                     target = sign_extend(target, 22);
2095                     target <<= 2;
2096                     do_fbranch(dc, target, insn, 0, cpu_cond);
2097                     goto jmp_insn;
2098                 }
2099             case 0x4:           /* SETHI */
2100                 if (rd) { // nop when rd == 0
2101                     uint32_t value = GET_FIELD(insn, 10, 31);
2102                     TCGv r_const;
2103
2104                     r_const = tcg_const_tl(value << 10);
2105                     gen_movl_TN_reg(rd, r_const);
2106                     tcg_temp_free(r_const);
2107                 }
2108                 break;
2109             case 0x0:           /* UNIMPL */
2110             default:
2111                 goto illegal_insn;
2112             }
2113             break;
2114         }
2115         break;
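         /* CALL: the sign-extended 30-bit word displacement is added to
            the current PC, the address of the CALL itself is written to
            r15 (%o7), and the target becomes the new npc (delayed
            control transfer). */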
2116     case 1:                     /*CALL*/
2117         {
2118             target_long target = GET_FIELDs(insn, 2, 31) << 2;
2119             TCGv r_const;
2120
2121             r_const = tcg_const_tl(dc->pc);
2122             gen_movl_TN_reg(15, r_const);
2123             tcg_temp_free(r_const);
2124             target += dc->pc;
2125             gen_mov_pc_npc(dc, cpu_cond);
2126             dc->npc = target;
2127         }
2128         goto jmp_insn;
2129     case 2:                     /* FPU & Logical Operations */
2130         {
2131             unsigned int xop = GET_FIELD(insn, 7, 12);
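                 /* Ticc (xop 0x3a): the trap number is rs1 + rs2 (or
                    rs1 + the 7-bit immediate), masked with V8_TRAP_MASK
                    (or UA2005_HTRAP_MASK for a HYPV-capable CPU in
                    supervisor mode) and offset by TT_TRAP; conditional
                    traps raise the exception behind a brcond on the
                    evaluated condition. */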
2132             if (xop == 0x3a) {  /* generate trap */
2133                 int cond;
2134
2135                 cpu_src1 = get_src1(insn, cpu_src1);
2136                 if (IS_IMM) {
2137                     rs2 = GET_FIELD(insn, 25, 31);
2138                     tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2139                 } else {
2140                     rs2 = GET_FIELD(insn, 27, 31);
2141                     if (rs2 != 0) {
2142                         gen_movl_reg_TN(rs2, cpu_src2);
2143                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2144                     } else
2145                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
2146                 }
2147                 cond = GET_FIELD(insn, 3, 6);
2148                 if (cond == 0x8) {
2149                     save_state(dc, cpu_cond);
2150                     if ((dc->def->features & CPU_FEATURE_HYPV) &&
2151                         supervisor(dc))
2152                         tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2153                     else
2154                         tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2155                     tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2156                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2157                     gen_helper_raise_exception(cpu_tmp32);
2158                 } else if (cond != 0) {
2159                     TCGv r_cond = tcg_temp_new();
2160                     int l1;
2161 #ifdef TARGET_SPARC64
2162                     /* V9 icc/xcc */
2163                     int cc = GET_FIELD_SP(insn, 11, 12);
2164
2165                     save_state(dc, cpu_cond);
2166                     if (cc == 0)
2167                         gen_cond(r_cond, 0, cond, dc);
2168                     else if (cc == 2)
2169                         gen_cond(r_cond, 1, cond, dc);
2170                     else
2171                         goto illegal_insn;
2172 #else
2173                     save_state(dc, cpu_cond);
2174                     gen_cond(r_cond, 0, cond, dc);
2175 #endif
2176                     l1 = gen_new_label();
2177                     tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2178
2179                     if ((dc->def->features & CPU_FEATURE_HYPV) &&
2180                         supervisor(dc))
2181                         tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2182                     else
2183                         tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2184                     tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2185                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2186                     gen_helper_raise_exception(cpu_tmp32);
2187
2188                     gen_set_label(l1);
2189                     tcg_temp_free(r_cond);
2190                 }
2191                 gen_op_next_insn();
2192                 tcg_gen_exit_tb(0);
2193                 dc->is_br = 1;
2194                 goto jmp_insn;
2195             } else if (xop == 0x28) {
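                 /* RDY / RDASR: rs1 selects which ancillary state
                    register is read; the result goes to rd. */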
2196                 rs1 = GET_FIELD(insn, 13, 17);
2197                 switch (rs1) {
2198                 case 0: /* rdy */
2199 #ifndef TARGET_SPARC64
2200                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2201                                        manual, rdy on the microSPARC
2202                                        II */
2203                 case 0x0f:          /* stbar in the SPARCv8 manual,
2204                                        rdy on the microSPARC II */
2205                 case 0x10 ... 0x1f: /* implementation-dependent in the
2206                                        SPARCv8 manual, rdy on the
2207                                        microSPARC II */
2208 #endif
2209                     gen_movl_TN_reg(rd, cpu_y);
2210                     break;
2211 #ifdef TARGET_SPARC64
2212                 case 0x2: /* V9 rdccr */
2213                     gen_helper_compute_psr();
2214                     gen_helper_rdccr(cpu_dst);
2215                     gen_movl_TN_reg(rd, cpu_dst);
2216                     break;
2217                 case 0x3: /* V9 rdasi */
2218                     tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2219                     gen_movl_TN_reg(rd, cpu_dst);
2220                     break;
2221                 case 0x4: /* V9 rdtick */
2222                     {
2223                         TCGv_ptr r_tickptr;
2224
2225                         r_tickptr = tcg_temp_new_ptr();
2226                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2227                                        offsetof(CPUState, tick));
2228                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2229                         tcg_temp_free_ptr(r_tickptr);
2230                         gen_movl_TN_reg(rd, cpu_dst);
2231                     }
2232                     break;
2233                 case 0x5: /* V9 rdpc */
2234                     {
2235                         TCGv r_const;
2236
2237                         r_const = tcg_const_tl(dc->pc);
2238                         gen_movl_TN_reg(rd, r_const);
2239                         tcg_temp_free(r_const);
2240                     }
2241                     break;
2242                 case 0x6: /* V9 rdfprs */
2243                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2244                     gen_movl_TN_reg(rd, cpu_dst);
2245                     break;
2246                 case 0xf: /* V9 membar */
2247                     break; /* no effect */
2248                 case 0x13: /* Graphics Status */
2249                     if (gen_trap_ifnofpu(dc, cpu_cond))
2250                         goto jmp_insn;
2251                     gen_movl_TN_reg(rd, cpu_gsr);
2252                     break;
2253                 case 0x16: /* Softint */
2254                     tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2255                     gen_movl_TN_reg(rd, cpu_dst);
2256                     break;
2257                 case 0x17: /* Tick compare */
2258                     gen_movl_TN_reg(rd, cpu_tick_cmpr);
2259                     break;
2260                 case 0x18: /* System tick */
2261                     {
2262                         TCGv_ptr r_tickptr;
2263
2264                         r_tickptr = tcg_temp_new_ptr();
2265                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2266                                        offsetof(CPUState, stick));
2267                         gen_helper_tick_get_count(cpu_dst, r_tickptr);
2268                         tcg_temp_free_ptr(r_tickptr);
2269                         gen_movl_TN_reg(rd, cpu_dst);
2270                     }
2271                     break;
2272                 case 0x19: /* System tick compare */
2273                     gen_movl_TN_reg(rd, cpu_stick_cmpr);
2274                     break;
2275                 case 0x10: /* Performance Control */
2276                 case 0x11: /* Performance Instrumentation Counter */
2277                 case 0x12: /* Dispatch Control */
2278                 case 0x14: /* Softint set, WO */
2279                 case 0x15: /* Softint clear, WO */
2280 #endif
2281                 default:
2282                     goto illegal_insn;
2283                 }
2284 #if !defined(CONFIG_USER_ONLY)
2285             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2286 #ifndef TARGET_SPARC64
2287                 if (!supervisor(dc))
2288                     goto priv_insn;
2289                 gen_helper_compute_psr();
2290                 dc->cc_op = CC_OP_FLAGS;
2291                 gen_helper_rdpsr(cpu_dst);
2292 #else
2293                 CHECK_IU_FEATURE(dc, HYPV);
2294                 if (!hypervisor(dc))
2295                     goto priv_insn;
2296                 rs1 = GET_FIELD(insn, 13, 17);
2297                 switch (rs1) {
2298                 case 0: // hpstate
2299                     // gen_op_rdhpstate();
2300                     break;
2301                 case 1: // htstate
2302                     // gen_op_rdhtstate();
2303                     break;
2304                 case 3: // hintp
2305                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2306                     break;
2307                 case 5: // htba
2308                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
2309                     break;
2310                 case 6: // hver
2311                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
2312                     break;
2313                 case 31: // hstick_cmpr
2314                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2315                     break;
2316                 default:
2317                     goto illegal_insn;
2318                 }
2319 #endif
2320                 gen_movl_TN_reg(rd, cpu_dst);
2321                 break;
2322             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2323                 if (!supervisor(dc))
2324                     goto priv_insn;
2325 #ifdef TARGET_SPARC64
2326                 rs1 = GET_FIELD(insn, 13, 17);
2327                 switch (rs1) {
2328                 case 0: // tpc
2329                     {
2330                         TCGv_ptr r_tsptr;
2331
2332                         r_tsptr = tcg_temp_new_ptr();
2333                         tcg_gen_ld_ptr(r_tsptr, cpu_env,
2334                                        offsetof(CPUState, tsptr));
2335                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2336                                       offsetof(trap_state, tpc));
2337                         tcg_temp_free_ptr(r_tsptr);
2338                     }
2339                     break;
2340                 case 1: // tnpc
2341                     {
2342                         TCGv_ptr r_tsptr;
2343
2344                         r_tsptr = tcg_temp_new_ptr();
2345                         tcg_gen_ld_ptr(r_tsptr, cpu_env,
2346                                        offsetof(CPUState, tsptr));
2347                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2348                                       offsetof(trap_state, tnpc));
2349                         tcg_temp_free_ptr(r_tsptr);
2350                     }
2351                     break;
2352                 case 2: // tstate
2353                     {
2354                         TCGv_ptr r_tsptr;
2355
2356                         r_tsptr = tcg_temp_new_ptr();
2357                         tcg_gen_ld_ptr(r_tsptr, cpu_env,
2358                                        offsetof(CPUState, tsptr));
2359                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2360                                       offsetof(trap_state, tstate));
2361                         tcg_temp_free_ptr(r_tsptr);
2362                     }
2363                     break;
2364                 case 3: // tt
2365                     {
2366                         TCGv_ptr r_tsptr;
2367
2368                         r_tsptr = tcg_temp_new_ptr();
2369                         tcg_gen_ld_ptr(r_tsptr, cpu_env,
2370                                        offsetof(CPUState, tsptr));
2371                         tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2372                                        offsetof(trap_state, tt));
2373                         tcg_temp_free_ptr(r_tsptr);
2374                         tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2375                     }
2376                     break;
2377                 case 4: // tick
2378                     {
2379                         TCGv_ptr r_tickptr;
2380
2381                         r_tickptr = tcg_temp_new_ptr();
2382                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
2383                                        offsetof(CPUState, tick));
2384                         gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2385                         gen_movl_TN_reg(rd, cpu_tmp0);
2386                         tcg_temp_free_ptr(r_tickptr);
2387                     }
2388                     break;
2389                 case 5: // tba
2390                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2391                     break;
2392                 case 6: // pstate
2393                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2394                                    offsetof(CPUSPARCState, pstate));
2395                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2396                     break;
2397                 case 7: // tl
2398                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2399                                    offsetof(CPUSPARCState, tl));
2400                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2401                     break;
2402                 case 8: // pil
2403                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2404                                    offsetof(CPUSPARCState, psrpil));
2405                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2406                     break;
2407                 case 9: // cwp
2408                     gen_helper_rdcwp(cpu_tmp0);
2409                     break;
2410                 case 10: // cansave
2411                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2412                                    offsetof(CPUSPARCState, cansave));
2413                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2414                     break;
2415                 case 11: // canrestore
2416                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2417                                    offsetof(CPUSPARCState, canrestore));
2418                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2419                     break;
2420                 case 12: // cleanwin
2421                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2422                                    offsetof(CPUSPARCState, cleanwin));
2423                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2424                     break;
2425                 case 13: // otherwin
2426                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2427                                    offsetof(CPUSPARCState, otherwin));
2428                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2429                     break;
2430                 case 14: // wstate
2431                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2432                                    offsetof(CPUSPARCState, wstate));
2433                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2434                     break;
2435                 case 16: // UA2005 gl
2436                     CHECK_IU_FEATURE(dc, GL);
2437                     tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2438                                    offsetof(CPUSPARCState, gl));
2439                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2440                     break;
2441                 case 26: // UA2005 strand status
2442                     CHECK_IU_FEATURE(dc, HYPV);
2443                     if (!hypervisor(dc))
2444                         goto priv_insn;
2445                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2446                     break;
2447                 case 31: // ver
2448                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2449                     break;
2450                 case 15: // fq
2451                 default:
2452                     goto illegal_insn;
2453                 }
2454 #else
2455                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2456 #endif
2457                 gen_movl_TN_reg(rd, cpu_tmp0);
2458                 break;
2459             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2460 #ifdef TARGET_SPARC64
2461                 save_state(dc, cpu_cond);
2462                 gen_helper_flushw();
2463 #else
2464                 if (!supervisor(dc))
2465                     goto priv_insn;
2466                 gen_movl_TN_reg(rd, cpu_tbr);
2467 #endif
2468                 break;
2469 #endif
2470             } else if (xop == 0x34) {   /* FPU Operations */
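                 /* FPop1: arithmetic and conversion operations, selected
                    by the 9-bit opf field (insn<13:5>).  Single-precision
                    values live in cpu_fpr[]; double and quad operands go
                    through the DT0/DT1 and QT0/QT1 staging registers, and
                    IEEE exceptions are checked after each helper call. */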
2471                 if (gen_trap_ifnofpu(dc, cpu_cond))
2472                     goto jmp_insn;
2473                 gen_op_clear_ieee_excp_and_FTT();
2474                 rs1 = GET_FIELD(insn, 13, 17);
2475                 rs2 = GET_FIELD(insn, 27, 31);
2476                 xop = GET_FIELD(insn, 18, 26);
2477                 switch (xop) {
2478                 case 0x1: /* fmovs */
2479                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2480                     break;
2481                 case 0x5: /* fnegs */
2482                     gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2483                     break;
2484                 case 0x9: /* fabss */
2485                     gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2486                     break;
2487                 case 0x29: /* fsqrts */
2488                     CHECK_FPU_FEATURE(dc, FSQRT);
2489                     gen_clear_float_exceptions();
2490                     gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2491                     gen_helper_check_ieee_exceptions();
2492                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2493                     break;
2494                 case 0x2a: /* fsqrtd */
2495                     CHECK_FPU_FEATURE(dc, FSQRT);
2496                     gen_op_load_fpr_DT1(DFPREG(rs2));
2497                     gen_clear_float_exceptions();
2498                     gen_helper_fsqrtd();
2499                     gen_helper_check_ieee_exceptions();
2500                     gen_op_store_DT0_fpr(DFPREG(rd));
2501                     break;
2502                 case 0x2b: /* fsqrtq */
2503                     CHECK_FPU_FEATURE(dc, FLOAT128);
2504                     gen_op_load_fpr_QT1(QFPREG(rs2));
2505                     gen_clear_float_exceptions();
2506                     gen_helper_fsqrtq();
2507                     gen_helper_check_ieee_exceptions();
2508                     gen_op_store_QT0_fpr(QFPREG(rd));
2509                     break;
2510                 case 0x41: /* fadds */
2511                     gen_clear_float_exceptions();
2512                     gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2513                     gen_helper_check_ieee_exceptions();
2514                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2515                     break;
2516                 case 0x42: /* faddd */
2517                     gen_op_load_fpr_DT0(DFPREG(rs1));
2518                     gen_op_load_fpr_DT1(DFPREG(rs2));
2519                     gen_clear_float_exceptions();
2520                     gen_helper_faddd();
2521                     gen_helper_check_ieee_exceptions();
2522                     gen_op_store_DT0_fpr(DFPREG(rd));
2523                     break;
2524                 case 0x43: /* faddq */
2525                     CHECK_FPU_FEATURE(dc, FLOAT128);
2526                     gen_op_load_fpr_QT0(QFPREG(rs1));
2527                     gen_op_load_fpr_QT1(QFPREG(rs2));
2528                     gen_clear_float_exceptions();
2529                     gen_helper_faddq();
2530                     gen_helper_check_ieee_exceptions();
2531                     gen_op_store_QT0_fpr(QFPREG(rd));
2532                     break;
2533                 case 0x45: /* fsubs */
2534                     gen_clear_float_exceptions();
2535                     gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2536                     gen_helper_check_ieee_exceptions();
2537                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2538                     break;
2539                 case 0x46: /* fsubd */
2540                     gen_op_load_fpr_DT0(DFPREG(rs1));
2541                     gen_op_load_fpr_DT1(DFPREG(rs2));
2542                     gen_clear_float_exceptions();
2543                     gen_helper_fsubd();
2544                     gen_helper_check_ieee_exceptions();
2545                     gen_op_store_DT0_fpr(DFPREG(rd));
2546                     break;
2547                 case 0x47: /* fsubq */
2548                     CHECK_FPU_FEATURE(dc, FLOAT128);
2549                     gen_op_load_fpr_QT0(QFPREG(rs1));
2550                     gen_op_load_fpr_QT1(QFPREG(rs2));
2551                     gen_clear_float_exceptions();
2552                     gen_helper_fsubq();
2553                     gen_helper_check_ieee_exceptions();
2554                     gen_op_store_QT0_fpr(QFPREG(rd));
2555                     break;
2556                 case 0x49: /* fmuls */
2557                     CHECK_FPU_FEATURE(dc, FMUL);
2558                     gen_clear_float_exceptions();
2559                     gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2560                     gen_helper_check_ieee_exceptions();
2561                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2562                     break;
2563                 case 0x4a: /* fmuld */
2564                     CHECK_FPU_FEATURE(dc, FMUL);
2565                     gen_op_load_fpr_DT0(DFPREG(rs1));
2566                     gen_op_load_fpr_DT1(DFPREG(rs2));
2567                     gen_clear_float_exceptions();
2568                     gen_helper_fmuld();
2569                     gen_helper_check_ieee_exceptions();
2570                     gen_op_store_DT0_fpr(DFPREG(rd));
2571                     break;
2572                 case 0x4b: /* fmulq */
2573                     CHECK_FPU_FEATURE(dc, FLOAT128);
2574                     CHECK_FPU_FEATURE(dc, FMUL);
2575                     gen_op_load_fpr_QT0(QFPREG(rs1));
2576                     gen_op_load_fpr_QT1(QFPREG(rs2));
2577                     gen_clear_float_exceptions();
2578                     gen_helper_fmulq();
2579                     gen_helper_check_ieee_exceptions();
2580                     gen_op_store_QT0_fpr(QFPREG(rd));
2581                     break;
2582                 case 0x4d: /* fdivs */
2583                     gen_clear_float_exceptions();
2584                     gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2585                     gen_helper_check_ieee_exceptions();
2586                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2587                     break;
2588                 case 0x4e: /* fdivd */
2589                     gen_op_load_fpr_DT0(DFPREG(rs1));
2590                     gen_op_load_fpr_DT1(DFPREG(rs2));
2591                     gen_clear_float_exceptions();
2592                     gen_helper_fdivd();
2593                     gen_helper_check_ieee_exceptions();
2594                     gen_op_store_DT0_fpr(DFPREG(rd));
2595                     break;
2596                 case 0x4f: /* fdivq */
2597                     CHECK_FPU_FEATURE(dc, FLOAT128);
2598                     gen_op_load_fpr_QT0(QFPREG(rs1));
2599                     gen_op_load_fpr_QT1(QFPREG(rs2));
2600                     gen_clear_float_exceptions();
2601                     gen_helper_fdivq();
2602                     gen_helper_check_ieee_exceptions();
2603                     gen_op_store_QT0_fpr(QFPREG(rd));
2604                     break;
2605                 case 0x69: /* fsmuld */
2606                     CHECK_FPU_FEATURE(dc, FSMULD);
2607                     gen_clear_float_exceptions();
2608                     gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2609                     gen_helper_check_ieee_exceptions();
2610                     gen_op_store_DT0_fpr(DFPREG(rd));
2611                     break;
2612                 case 0x6e: /* fdmulq */
2613                     CHECK_FPU_FEATURE(dc, FLOAT128);
2614                     gen_op_load_fpr_DT0(DFPREG(rs1));
2615                     gen_op_load_fpr_DT1(DFPREG(rs2));
2616                     gen_clear_float_exceptions();
2617                     gen_helper_fdmulq();
2618                     gen_helper_check_ieee_exceptions();
2619                     gen_op_store_QT0_fpr(QFPREG(rd));
2620                     break;
2621                 case 0xc4: /* fitos */
2622                     gen_clear_float_exceptions();
2623                     gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2624                     gen_helper_check_ieee_exceptions();
2625                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2626                     break;
2627                 case 0xc6: /* fdtos */
2628                     gen_op_load_fpr_DT1(DFPREG(rs2));
2629                     gen_clear_float_exceptions();
2630                     gen_helper_fdtos(cpu_tmp32);
2631                     gen_helper_check_ieee_exceptions();
2632                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2633                     break;
2634                 case 0xc7: /* fqtos */
2635                     CHECK_FPU_FEATURE(dc, FLOAT128);
2636                     gen_op_load_fpr_QT1(QFPREG(rs2));
2637                     gen_clear_float_exceptions();
2638                     gen_helper_fqtos(cpu_tmp32);
2639                     gen_helper_check_ieee_exceptions();
2640                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2641                     break;
2642                 case 0xc8: /* fitod */
2643                     gen_helper_fitod(cpu_fpr[rs2]);
2644                     gen_op_store_DT0_fpr(DFPREG(rd));
2645                     break;
2646                 case 0xc9: /* fstod */
2647                     gen_helper_fstod(cpu_fpr[rs2]);
2648                     gen_op_store_DT0_fpr(DFPREG(rd));
2649                     break;
2650                 case 0xcb: /* fqtod */
2651                     CHECK_FPU_FEATURE(dc, FLOAT128);
2652                     gen_op_load_fpr_QT1(QFPREG(rs2));
2653                     gen_clear_float_exceptions();
2654                     gen_helper_fqtod();
2655                     gen_helper_check_ieee_exceptions();
2656                     gen_op_store_DT0_fpr(DFPREG(rd));
2657                     break;
2658                 case 0xcc: /* fitoq */
2659                     CHECK_FPU_FEATURE(dc, FLOAT128);
2660                     gen_helper_fitoq(cpu_fpr[rs2]);
2661                     gen_op_store_QT0_fpr(QFPREG(rd));
2662                     break;
2663                 case 0xcd: /* fstoq */
2664                     CHECK_FPU_FEATURE(dc, FLOAT128);
2665                     gen_helper_fstoq(cpu_fpr[rs2]);
2666                     gen_op_store_QT0_fpr(QFPREG(rd));
2667                     break;
2668                 case 0xce: /* fdtoq */
2669                     CHECK_FPU_FEATURE(dc, FLOAT128);
2670                     gen_op_load_fpr_DT1(DFPREG(rs2));
2671                     gen_helper_fdtoq();
2672                     gen_op_store_QT0_fpr(QFPREG(rd));
2673                     break;
2674                 case 0xd1: /* fstoi */
2675                     gen_clear_float_exceptions();
2676                     gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2677                     gen_helper_check_ieee_exceptions();
2678                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2679                     break;
2680                 case 0xd2: /* fdtoi */
2681                     gen_op_load_fpr_DT1(DFPREG(rs2));
2682                     gen_clear_float_exceptions();
2683                     gen_helper_fdtoi(cpu_tmp32);
2684                     gen_helper_check_ieee_exceptions();
2685                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2686                     break;
2687                 case 0xd3: /* fqtoi */
2688                     CHECK_FPU_FEATURE(dc, FLOAT128);
2689                     gen_op_load_fpr_QT1(QFPREG(rs2));
2690                     gen_clear_float_exceptions();
2691                     gen_helper_fqtoi(cpu_tmp32);
2692                     gen_helper_check_ieee_exceptions();
2693                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2694                     break;
2695 #ifdef TARGET_SPARC64
2696                 case 0x2: /* V9 fmovd */
2697                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2698                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2699                                     cpu_fpr[DFPREG(rs2) + 1]);
2700                     break;
2701                 case 0x3: /* V9 fmovq */
2702                     CHECK_FPU_FEATURE(dc, FLOAT128);
2703                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2704                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2705                                     cpu_fpr[QFPREG(rs2) + 1]);
2706                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2707                                     cpu_fpr[QFPREG(rs2) + 2]);
2708                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2709                                     cpu_fpr[QFPREG(rs2) + 3]);
2710                     break;
2711                 case 0x6: /* V9 fnegd */
2712                     gen_op_load_fpr_DT1(DFPREG(rs2));
2713                     gen_helper_fnegd();
2714                     gen_op_store_DT0_fpr(DFPREG(rd));
2715                     break;
2716                 case 0x7: /* V9 fnegq */
2717                     CHECK_FPU_FEATURE(dc, FLOAT128);
2718                     gen_op_load_fpr_QT1(QFPREG(rs2));
2719                     gen_helper_fnegq();
2720                     gen_op_store_QT0_fpr(QFPREG(rd));
2721                     break;
2722                 case 0xa: /* V9 fabsd */
2723                     gen_op_load_fpr_DT1(DFPREG(rs2));
2724                     gen_helper_fabsd();
2725                     gen_op_store_DT0_fpr(DFPREG(rd));
2726                     break;
2727                 case 0xb: /* V9 fabsq */
2728                     CHECK_FPU_FEATURE(dc, FLOAT128);
2729                     gen_op_load_fpr_QT1(QFPREG(rs2));
2730                     gen_helper_fabsq();
2731                     gen_op_store_QT0_fpr(QFPREG(rd));
2732                     break;
2733                 case 0x81: /* V9 fstox */
2734                     gen_clear_float_exceptions();
2735                     gen_helper_fstox(cpu_fpr[rs2]);
2736                     gen_helper_check_ieee_exceptions();
2737                     gen_op_store_DT0_fpr(DFPREG(rd));
2738                     break;
2739                 case 0x82: /* V9 fdtox */
2740                     gen_op_load_fpr_DT1(DFPREG(rs2));
2741                     gen_clear_float_exceptions();
2742                     gen_helper_fdtox();
2743                     gen_helper_check_ieee_exceptions();
2744                     gen_op_store_DT0_fpr(DFPREG(rd));
2745                     break;
2746                 case 0x83: /* V9 fqtox */
2747                     CHECK_FPU_FEATURE(dc, FLOAT128);
2748                     gen_op_load_fpr_QT1(QFPREG(rs2));
2749                     gen_clear_float_exceptions();
2750                     gen_helper_fqtox();
2751                     gen_helper_check_ieee_exceptions();
2752                     gen_op_store_DT0_fpr(DFPREG(rd));
2753                     break;
2754                 case 0x84: /* V9 fxtos */
2755                     gen_op_load_fpr_DT1(DFPREG(rs2));
2756                     gen_clear_float_exceptions();
2757                     gen_helper_fxtos(cpu_tmp32);
2758                     gen_helper_check_ieee_exceptions();
2759                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2760                     break;
2761                 case 0x88: /* V9 fxtod */
2762                     gen_op_load_fpr_DT1(DFPREG(rs2));
2763                     gen_clear_float_exceptions();
2764                     gen_helper_fxtod();
2765                     gen_helper_check_ieee_exceptions();
2766                     gen_op_store_DT0_fpr(DFPREG(rd));
2767                     break;
2768                 case 0x8c: /* V9 fxtoq */
2769                     CHECK_FPU_FEATURE(dc, FLOAT128);
2770                     gen_op_load_fpr_DT1(DFPREG(rs2));
2771                     gen_clear_float_exceptions();
2772                     gen_helper_fxtoq();
2773                     gen_helper_check_ieee_exceptions();
2774                     gen_op_store_QT0_fpr(QFPREG(rd));
2775                     break;
2776 #endif
2777                 default:
2778                     goto illegal_insn;
2779                 }
2780             } else if (xop == 0x35) {   /* FPU Operations */
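                 /* FPop2: conditional floating-point moves (on integer
                    register contents, on %fccN, or on %icc/%xcc) and the
                    floating-point compare instructions. */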
2781 #ifdef TARGET_SPARC64
2782                 int cond;
2783 #endif
2784                 if (gen_trap_ifnofpu(dc, cpu_cond))
2785                     goto jmp_insn;
2786                 gen_op_clear_ieee_excp_and_FTT();
2787                 rs1 = GET_FIELD(insn, 13, 17);
2788                 rs2 = GET_FIELD(insn, 27, 31);
2789                 xop = GET_FIELD(insn, 18, 26);
2790 #ifdef TARGET_SPARC64
2791                 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2792                     int l1;
2793
2794                     l1 = gen_new_label();
2795                     cond = GET_FIELD_SP(insn, 14, 17);
2796                     cpu_src1 = get_src1(insn, cpu_src1);
2797                     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2798                                        0, l1);
2799                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2800                     gen_set_label(l1);
2801                     break;
2802                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2803                     int l1;
2804
2805                     l1 = gen_new_label();
2806                     cond = GET_FIELD_SP(insn, 14, 17);
2807                     cpu_src1 = get_src1(insn, cpu_src1);
2808                     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2809                                        0, l1);
2810                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2811                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                         cpu_fpr[DFPREG(rs2) + 1]);
2812                     gen_set_label(l1);
2813                     break;
2814                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2815                     int l1;
2816
2817                     CHECK_FPU_FEATURE(dc, FLOAT128);
2818                     l1 = gen_new_label();
2819                     cond = GET_FIELD_SP(insn, 14, 17);
2820                     cpu_src1 = get_src1(insn, cpu_src1);
2821                     tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2822                                        0, l1);
2823                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2824                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                         cpu_fpr[QFPREG(rs2) + 1]);
2825                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                         cpu_fpr[QFPREG(rs2) + 2]);
2826                     tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                         cpu_fpr[QFPREG(rs2) + 3]);
2827                     gen_set_label(l1);
2828                     break;
2829                 }
2830 #endif
2831                 switch (xop) {
2832 #ifdef TARGET_SPARC64
2833 #define FMOVSCC(fcc)                                                    \
2834                     {                                                   \
2835                         TCGv r_cond;                                    \
2836                         int l1;                                         \
2837                                                                         \
2838                         l1 = gen_new_label();                           \
2839                         r_cond = tcg_temp_new();                        \
2840                         cond = GET_FIELD_SP(insn, 14, 17);              \
2841                         gen_fcond(r_cond, fcc, cond);                   \
2842                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2843                                            0, l1);                      \
2844                         tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
2845                         gen_set_label(l1);                              \
2846                         tcg_temp_free(r_cond);                          \
2847                     }
2848 #define FMOVDCC(fcc)                                                    \
2849                     {                                                   \
2850                         TCGv r_cond;                                    \
2851                         int l1;                                         \
2852                                                                         \
2853                         l1 = gen_new_label();                           \
2854                         r_cond = tcg_temp_new();                        \
2855                         cond = GET_FIELD_SP(insn, 14, 17);              \
2856                         gen_fcond(r_cond, fcc, cond);                   \
2857                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2858                                            0, l1);                      \
2859                         tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
2860                                         cpu_fpr[DFPREG(rs2)]);          \
2861                         tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
2862                                         cpu_fpr[DFPREG(rs2) + 1]);      \
2863                         gen_set_label(l1);                              \
2864                         tcg_temp_free(r_cond);                          \
2865                     }
2866 #define FMOVQCC(fcc)                                                    \
2867                     {                                                   \
2868                         TCGv r_cond;                                    \
2869                         int l1;                                         \
2870                                                                         \
2871                         l1 = gen_new_label();                           \
2872                         r_cond = tcg_temp_new();                        \
2873                         cond = GET_FIELD_SP(insn, 14, 17);              \
2874                         gen_fcond(r_cond, fcc, cond);                   \
2875                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2876                                            0, l1);                      \
2877                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
2878                                         cpu_fpr[QFPREG(rs2)]);          \
2879                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
2880                                         cpu_fpr[QFPREG(rs2) + 1]);      \
2881                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
2882                                         cpu_fpr[QFPREG(rs2) + 2]);      \
2883                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
2884                                         cpu_fpr[QFPREG(rs2) + 3]);      \
2885                         gen_set_label(l1);                              \
2886                         tcg_temp_free(r_cond);                          \
2887                     }
2888                     case 0x001: /* V9 fmovscc %fcc0 */
2889                         FMOVSCC(0);
2890                         break;
2891                     case 0x002: /* V9 fmovdcc %fcc0 */
2892                         FMOVDCC(0);
2893                         break;
2894                     case 0x003: /* V9 fmovqcc %fcc0 */
2895                         CHECK_FPU_FEATURE(dc, FLOAT128);
2896                         FMOVQCC(0);
2897                         break;
2898                     case 0x041: /* V9 fmovscc %fcc1 */
2899                         FMOVSCC(1);
2900                         break;
2901                     case 0x042: /* V9 fmovdcc %fcc1 */
2902                         FMOVDCC(1);
2903                         break;
2904                     case 0x043: /* V9 fmovqcc %fcc1 */
2905                         CHECK_FPU_FEATURE(dc, FLOAT128);
2906                         FMOVQCC(1);
2907                         break;
2908                     case 0x081: /* V9 fmovscc %fcc2 */
2909                         FMOVSCC(2);
2910                         break;
2911                     case 0x082: /* V9 fmovdcc %fcc2 */
2912                         FMOVDCC(2);
2913                         break;
2914                     case 0x083: /* V9 fmovqcc %fcc2 */
2915                         CHECK_FPU_FEATURE(dc, FLOAT128);
2916                         FMOVQCC(2);
2917                         break;
2918                     case 0x0c1: /* V9 fmovscc %fcc3 */
2919                         FMOVSCC(3);
2920                         break;
2921                     case 0x0c2: /* V9 fmovdcc %fcc3 */
2922                         FMOVDCC(3);
2923                         break;
2924                     case 0x0c3: /* V9 fmovqcc %fcc3 */
2925                         CHECK_FPU_FEATURE(dc, FLOAT128);
2926                         FMOVQCC(3);
2927                         break;
2928 #undef FMOVSCC
2929 #undef FMOVDCC
2930 #undef FMOVQCC
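                 /* Same move patterns, but conditional on the integer
                    condition codes: the redefined macros evaluate the
                    condition with gen_cond() (%icc/%xcc) instead of
                    gen_fcond(). */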
2931 #define FMOVSCC(icc)                                                    \
2932                     {                                                   \
2933                         TCGv r_cond;                                    \
2934                         int l1;                                         \
2935                                                                         \
2936                         l1 = gen_new_label();                           \
2937                         r_cond = tcg_temp_new();                        \
2938                         cond = GET_FIELD_SP(insn, 14, 17);              \
2939                         gen_cond(r_cond, icc, cond, dc);                \
2940                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2941                                            0, l1);                      \
2942                         tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);     \
2943                         gen_set_label(l1);                              \
2944                         tcg_temp_free(r_cond);                          \
2945                     }
2946 #define FMOVDCC(icc)                                                    \
2947                     {                                                   \
2948                         TCGv r_cond;                                    \
2949                         int l1;                                         \
2950                                                                         \
2951                         l1 = gen_new_label();                           \
2952                         r_cond = tcg_temp_new();                        \
2953                         cond = GET_FIELD_SP(insn, 14, 17);              \
2954                         gen_cond(r_cond, icc, cond, dc);                \
2955                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2956                                            0, l1);                      \
2957                         tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],            \
2958                                         cpu_fpr[DFPREG(rs2)]);          \
2959                         tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],        \
2960                                         cpu_fpr[DFPREG(rs2) + 1]);      \
2961                         gen_set_label(l1);                              \
2962                         tcg_temp_free(r_cond);                          \
2963                     }
2964 #define FMOVQCC(icc)                                                    \
2965                     {                                                   \
2966                         TCGv r_cond;                                    \
2967                         int l1;                                         \
2968                                                                         \
2969                         l1 = gen_new_label();                           \
2970                         r_cond = tcg_temp_new();                        \
2971                         cond = GET_FIELD_SP(insn, 14, 17);              \
2972                         gen_cond(r_cond, icc, cond, dc);                \
2973                         tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,         \
2974                                            0, l1);                      \
2975                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],            \
2976                                         cpu_fpr[QFPREG(rs2)]);          \
2977                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],        \
2978                                         cpu_fpr[QFPREG(rs2) + 1]);      \
2979                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],        \
2980                                         cpu_fpr[QFPREG(rs2) + 2]);      \
2981                         tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],        \
2982                                         cpu_fpr[QFPREG(rs2) + 3]);      \
2983                         gen_set_label(l1);                              \
2984                         tcg_temp_free(r_cond);                          \
2985                     }
2986
2987                     case 0x101: /* V9 fmovscc %icc */
2988                         FMOVSCC(0);
2989                         break;
2990                     case 0x102: /* V9 fmovdcc %icc */
2991                         FMOVDCC(0);
                             break;
2992                     case 0x103: /* V9 fmovqcc %icc */
2993                         CHECK_FPU_FEATURE(dc, FLOAT128);
2994                         FMOVQCC(0);
2995                         break;
2996                     case 0x181: /* V9 fmovscc %xcc */
2997                         FMOVSCC(1);
2998                         break;
2999                     case 0x182: /* V9 fmovdcc %xcc */
3000                         FMOVDCC(1);
3001                         break;
3002                     case 0x183: /* V9 fmovqcc %xcc */
3003                         CHECK_FPU_FEATURE(dc, FLOAT128);
3004                         FMOVQCC(1);
3005                         break;
3006 #undef FMOVSCC
3007 #undef FMOVDCC
3008 #undef FMOVQCC
3009 #endif
3010                     case 0x51: /* fcmps, V9 %fcc */
3011                         gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
3012                         break;
3013                     case 0x52: /* fcmpd, V9 %fcc */
3014                         gen_op_load_fpr_DT0(DFPREG(rs1));
3015                         gen_op_load_fpr_DT1(DFPREG(rs2));
3016                         gen_op_fcmpd(rd & 3);
3017                         break;
3018                     case 0x53: /* fcmpq, V9 %fcc */
3019                         CHECK_FPU_FEATURE(dc, FLOAT128);
3020                         gen_op_load_fpr_QT0(QFPREG(rs1));
3021                         gen_op_load_fpr_QT1(QFPREG(rs2));
3022                         gen_op_fcmpq(rd & 3);
3023                         break;
3024                     case 0x55: /* fcmpes, V9 %fcc */
3025                         gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
3026                         break;
3027                     case 0x56: /* fcmped, V9 %fcc */
3028                         gen_op_load_fpr_DT0(DFPREG(rs1));
3029                         gen_op_load_fpr_DT1(DFPREG(rs2));
3030                         gen_op_fcmped(rd & 3);
3031                         break;
3032                     case 0x57: /* fcmpeq, V9 %fcc */
3033                         CHECK_FPU_FEATURE(dc, FLOAT128);
3034                         gen_op_load_fpr_QT0(QFPREG(rs1));
3035                         gen_op_load_fpr_QT1(QFPREG(rs2));
3036                         gen_op_fcmpeq(rd & 3);
3037                         break;
3038                     default:
3039                         goto illegal_insn;
3040                 }
3041             } else if (xop == 0x2) {
3042                 // clr/mov shortcut
3043
3044                 rs1 = GET_FIELD(insn, 13, 17);
3045                 if (rs1 == 0) {
3046                     // or %g0, x, y -> mov T0, x; mov y, T0
3047                     if (IS_IMM) {       /* immediate */
3048                         TCGv r_const;
3049
3050                         simm = GET_FIELDs(insn, 19, 31);
3051                         r_const = tcg_const_tl(simm);
3052                         gen_movl_TN_reg(rd, r_const);
3053                         tcg_temp_free(r_const);
3054                     } else {            /* register */
3055                         rs2 = GET_FIELD(insn, 27, 31);
3056                         gen_movl_reg_TN(rs2, cpu_dst);
3057                         gen_movl_TN_reg(rd, cpu_dst);
3058                     }
3059                 } else {
3060                     cpu_src1 = get_src1(insn, cpu_src1);
3061                     if (IS_IMM) {       /* immediate */
3062                         simm = GET_FIELDs(insn, 19, 31);
3063                         tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3064                         gen_movl_TN_reg(rd, cpu_dst);
3065                     } else {            /* register */
3066                         // or x, %g0, y -> mov T1, x; mov y, T1
3067                         rs2 = GET_FIELD(insn, 27, 31);
3068                         if (rs2 != 0) {
3069                             gen_movl_reg_TN(rs2, cpu_src2);
3070                             tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3071                             gen_movl_TN_reg(rd, cpu_dst);
3072                         } else
3073                             gen_movl_TN_reg(rd, cpu_src1);
3074                     }
3075                 }
3076 #ifdef TARGET_SPARC64
3077             } else if (xop == 0x25) { /* sll, V9 sllx */
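                     /* V9 shifts: instruction bit 12 selects the 64-bit form
                        (sllx/srlx/srax) with a 6-bit shift count; otherwise
                        only the low 5 bits of the count are used, as on V8. */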
3078                 cpu_src1 = get_src1(insn, cpu_src1);
3079                 if (IS_IMM) {   /* immediate */
3080                     simm = GET_FIELDs(insn, 20, 31);
3081                     if (insn & (1 << 12)) {
3082                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3083                     } else {
3084                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3085                     }
3086                 } else {                /* register */
3087                     rs2 = GET_FIELD(insn, 27, 31);
3088                     gen_movl_reg_TN(rs2, cpu_src2);
3089                     if (insn & (1 << 12)) {
3090                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3091                     } else {
3092                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3093                     }
3094                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3095                 }
3096                 gen_movl_TN_reg(rd, cpu_dst);
3097             } else if (xop == 0x26) { /* srl, V9 srlx */
3098                 cpu_src1 = get_src1(insn, cpu_src1);
3099                 if (IS_IMM) {   /* immediate */
3100                     simm = GET_FIELDs(insn, 20, 31);
3101                     if (insn & (1 << 12)) {
3102                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3103                     } else {
3104                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3105                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3106                     }
3107                 } else {                /* register */
3108                     rs2 = GET_FIELD(insn, 27, 31);
3109                     gen_movl_reg_TN(rs2, cpu_src2);
3110                     if (insn & (1 << 12)) {
3111                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3112                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3113                     } else {
3114                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3115                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3116                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3117                     }
3118                 }
3119                 gen_movl_TN_reg(rd, cpu_dst);
3120             } else if (xop == 0x27) { /* sra, V9 srax */
3121                 cpu_src1 = get_src1(insn, cpu_src1);
3122                 if (IS_IMM) {   /* immediate */
3123                     simm = GET_FIELDs(insn, 20, 31);
3124                     if (insn & (1 << 12)) {
3125                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3126                     } else {
3127                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3128                         tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3129                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3130                     }
3131                 } else {                /* register */
3132                     rs2 = GET_FIELD(insn, 27, 31);
3133                     gen_movl_reg_TN(rs2, cpu_src2);
3134                     if (insn & (1 << 12)) {
3135                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3136                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3137                     } else {
3138                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3139                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3140                         tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3141                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3142                     }
3143                 }
3144                 gen_movl_TN_reg(rd, cpu_dst);
3145 #endif
3146             } else if (xop < 0x36) {
3147                 if (xop < 0x20) {
3148                     cpu_src1 = get_src1(insn, cpu_src1);
3149                     cpu_src2 = get_src2(insn, cpu_src2);
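                         /* xop values below 0x20 are the basic ALU ops; bit 4
                            (0x10) selects the cc-setting variant.  With lazy
                            condition codes those variants compute the flags
                            eagerly and record CC_OP_FLAGS in cpu_cc_op and
                            dc->cc_op. */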
3150                     switch (xop & ~0x10) {
3151                     case 0x0: /* add */
3152                         if (IS_IMM) {
3153                             simm = GET_FIELDs(insn, 19, 31);
3154                             if (xop & 0x10) {
3155                                 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3156                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3157                                 dc->cc_op = CC_OP_FLAGS;
3158                             } else {
3159                                 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3160                             }
3161                         } else {
3162                             if (xop & 0x10) {
3163                                 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3164                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3165                                 dc->cc_op = CC_OP_FLAGS;
3166                             } else {
3167                                 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3168                             }
3169                         }
3170                         break;
3171                     case 0x1: /* and */
3172                         if (IS_IMM) {
3173                             simm = GET_FIELDs(insn, 19, 31);
3174                             tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3175                         } else {
3176                             tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3177                         }
3178                         if (xop & 0x10) {
3179                             gen_op_logic_cc(cpu_dst);
3180                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3181                             dc->cc_op = CC_OP_FLAGS;
3182                         }
3183                         break;
3184                     case 0x2: /* or */
3185                         if (IS_IMM) {
3186                             simm = GET_FIELDs(insn, 19, 31);
3187                             tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3188                         } else {
3189                             tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3190                         }
3191                         if (xop & 0x10) {
3192                             gen_op_logic_cc(cpu_dst);
3193                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3194                             dc->cc_op = CC_OP_FLAGS;
3195                         }
3196                         break;
3197                     case 0x3: /* xor */
3198                         if (IS_IMM) {
3199                             simm = GET_FIELDs(insn, 19, 31);
3200                             tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3201                         } else {
3202                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3203                         }
3204                         if (xop & 0x10) {
3205                             gen_op_logic_cc(cpu_dst);
3206                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3207                             dc->cc_op = CC_OP_FLAGS;
3208                         }
3209                         break;
3210                     case 0x4: /* sub */
3211                         if (IS_IMM) {
3212                             simm = GET_FIELDs(insn, 19, 31);
3213                             if (xop & 0x10) {
3214                                 gen_op_subi_cc(cpu_dst, cpu_src1, simm);
3215                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3216                                 dc->cc_op = CC_OP_FLAGS;
3217                             } else {
3218                                 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3219                             }
3220                         } else {
3221                             if (xop & 0x10) {
3222                                 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3223                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3224                                 dc->cc_op = CC_OP_FLAGS;
3225                             } else {
3226                                 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3227                             }
3228                         }
3229                         break;
3230                     case 0x5: /* andn */
3231                         if (IS_IMM) {
3232                             simm = GET_FIELDs(insn, 19, 31);
3233                             tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3234                         } else {
3235                             tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3236                         }
3237                         if (xop & 0x10) {
3238                             gen_op_logic_cc(cpu_dst);
3239                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3240                             dc->cc_op = CC_OP_FLAGS;
3241                         }
3242                         break;
3243                     case 0x6: /* orn */
3244                         if (IS_IMM) {
3245                             simm = GET_FIELDs(insn, 19, 31);
3246                             tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3247                         } else {
3248                             tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3249                         }
3250                         if (xop & 0x10) {
3251                             gen_op_logic_cc(cpu_dst);
3252                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3253                             dc->cc_op = CC_OP_FLAGS;
3254                         }
3255                         break;
3256                     case 0x7: /* xorn */
3257                         if (IS_IMM) {
3258                             simm = GET_FIELDs(insn, 19, 31);
3259                             tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3260                         } else {
3261                             tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3262                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3263                         }
3264                         if (xop & 0x10) {
3265                             gen_op_logic_cc(cpu_dst);
3266                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3267                             dc->cc_op = CC_OP_FLAGS;
3268                         }
3269                         break;
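                         /* addx/subx consume the carry bit (addx: dst = src1 +
                            src2 + C, subx: dst = src1 - src2 - C), so the
                            lazily evaluated PSR is made concrete first with
                            gen_helper_compute_psr(). */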
3270                     case 0x8: /* addx, V9 addc */
3271                         if (IS_IMM) {
3272                             simm = GET_FIELDs(insn, 19, 31);
3273                             if (xop & 0x10) {
3274                                 gen_helper_compute_psr();
3275                                 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
3276                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3277                                 dc->cc_op = CC_OP_FLAGS;
3278                             } else {
3279                                 gen_helper_compute_psr();
3280                                 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3281                                 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3282                                 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3283                             }
3284                         } else {
3285                             if (xop & 0x10) {
3286                                 gen_helper_compute_psr();
3287                                 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3288                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3289                                 dc->cc_op = CC_OP_FLAGS;
3290                             } else {
3291                                 gen_helper_compute_psr();
3292                                 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3293                                 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3294                                 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3295                             }
3296                         }
3297                         break;
3298 #ifdef TARGET_SPARC64
3299                     case 0x9: /* V9 mulx */
3300                         if (IS_IMM) {
3301                             simm = GET_FIELDs(insn, 19, 31);
3302                             tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3303                         } else {
3304                             tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3305                         }
3306                         break;
3307 #endif
3308                     case 0xa: /* umul */
3309                         CHECK_IU_FEATURE(dc, MUL);
3310                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3311                         if (xop & 0x10) {
3312                             gen_op_logic_cc(cpu_dst);
3313                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3314                             dc->cc_op = CC_OP_FLAGS;
3315                         }
3316                         break;
3317                     case 0xb: /* smul */
3318                         CHECK_IU_FEATURE(dc, MUL);
3319                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3320                         if (xop & 0x10) {
3321                             gen_op_logic_cc(cpu_dst);
3322                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3323                             dc->cc_op = CC_OP_FLAGS;
3324                         }
3325                         break;
3326                     case 0xc: /* subx, V9 subc */
3327                         if (IS_IMM) {
3328                             simm = GET_FIELDs(insn, 19, 31);
3329                             if (xop & 0x10) {
3330                                 gen_helper_compute_psr();
3331                                 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3332                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3333                                 dc->cc_op = CC_OP_FLAGS;
3334                             } else {
3335                                 gen_helper_compute_psr();
3336                                 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3337                                 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3338                                 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3339                             }
3340                         } else {
3341                             if (xop & 0x10) {
3342                                 gen_helper_compute_psr();
3343                                 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3344                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3345                                 dc->cc_op = CC_OP_FLAGS;
3346                             } else {
3347                                 gen_helper_compute_psr();
3348                                 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3349                                 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3350                                 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3351                             }
3352                         }
3353                         break;
3354 #ifdef TARGET_SPARC64
3355                     case 0xd: /* V9 udivx */
3356                         tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3357                         tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3358                         gen_trap_ifdivzero_tl(cpu_cc_src2);
3359                         tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3360                         break;
3361 #endif
3362                     case 0xe: /* udiv */
3363                         CHECK_IU_FEATURE(dc, DIV);
3364                         gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3365                         if (xop & 0x10) {
3366                             gen_op_div_cc(cpu_dst);
3367                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3368                             dc->cc_op = CC_OP_FLAGS;
3369                         }
3370                         break;
3371                     case 0xf: /* sdiv */
3372                         CHECK_IU_FEATURE(dc, DIV);
3373                         gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3374                         if (xop & 0x10) {
3375                             gen_op_div_cc(cpu_dst);
3376                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3377                             dc->cc_op = CC_OP_FLAGS;
3378                         }
3379                         break;
3380                     default:
3381                         goto illegal_insn;
3382                     }
3383                     gen_movl_TN_reg(rd, cpu_dst);
3384                 } else {
3385                     cpu_src1 = get_src1(insn, cpu_src1);
3386                     cpu_src2 = get_src2(insn, cpu_src2);
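                         /* xop 0x20 and above: tagged add/sub, mulscc, the
                            32-bit shifts and writes to state registers; these
                            cases store their own result instead of falling
                            through to the common gen_movl_TN_reg() above. */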
3387                     switch (xop) {
3388                     case 0x20: /* taddcc */
3389                         gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3390                         gen_movl_TN_reg(rd, cpu_dst);
3391                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3392                         dc->cc_op = CC_OP_FLAGS;
3393                         break;
3394                     case 0x21: /* tsubcc */
3395                         gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3396                         gen_movl_TN_reg(rd, cpu_dst);
3397                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3398                         dc->cc_op = CC_OP_FLAGS;
3399                         break;
3400                     case 0x22: /* taddcctv */
3401                         save_state(dc, cpu_cond);
3402                         gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3403                         gen_movl_TN_reg(rd, cpu_dst);
3404                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3405                         dc->cc_op = CC_OP_FLAGS;
3406                         break;
3407                     case 0x23: /* tsubcctv */
3408                         save_state(dc, cpu_cond);
3409                         gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3410                         gen_movl_TN_reg(rd, cpu_dst);
3411                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3412                         dc->cc_op = CC_OP_FLAGS;
3413                         break;
3414                     case 0x24: /* mulscc */
3415                         gen_helper_compute_psr();
3416                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3417                         gen_movl_TN_reg(rd, cpu_dst);
3418                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3419                         dc->cc_op = CC_OP_FLAGS;
3420                         break;
3421 #ifndef TARGET_SPARC64
3422                     case 0x25:  /* sll */
3423                         if (IS_IMM) { /* immediate */
3424                             simm = GET_FIELDs(insn, 20, 31);
3425                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3426                         } else { /* register */
3427                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3428                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3429                         }
3430                         gen_movl_TN_reg(rd, cpu_dst);
3431                         break;
3432                     case 0x26:  /* srl */
3433                         if (IS_IMM) { /* immediate */
3434                             simm = GET_FIELDs(insn, 20, 31);
3435                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3436                         } else { /* register */
3437                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3438                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3439                         }
3440                         gen_movl_TN_reg(rd, cpu_dst);
3441                         break;
3442                     case 0x27:  /* sra */
3443                         if (IS_IMM) { /* immediate */
3444                             simm = GET_FIELDs(insn, 20, 31);
3445                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3446                         } else { /* register */
3447                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3448                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3449                         }
3450                         gen_movl_TN_reg(rd, cpu_dst);
3451                         break;
3452 #endif
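                         /* wr %y / wr ASR: the value written is rs1 XOR rs2
                            (or rs1 XOR simm13); the rd field selects the
                            destination state register. */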
3453                     case 0x30:
3454                         {
3455                             switch(rd) {
3456                             case 0: /* wry */
3457                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3458                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3459                                 break;
3460 #ifndef TARGET_SPARC64
3461                             case 0x01 ... 0x0f: /* undefined in the
3462                                                    SPARCv8 manual, nop
3463                                                    on the microSPARC
3464                                                    II */
3465                             case 0x10 ... 0x1f: /* implementation-dependent
3466                                                    in the SPARCv8
3467                                                    manual, nop on the
3468                                                    microSPARC II */
3469                                 break;
3470 #else
3471                             case 0x2: /* V9 wrccr */
3472                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3473                                 gen_helper_wrccr(cpu_dst);
3474                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3475                                 dc->cc_op = CC_OP_FLAGS;
3476                                 break;
3477                             case 0x3: /* V9 wrasi */
3478                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3479                                 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3480                                 break;
3481                             case 0x6: /* V9 wrfprs */
3482                                 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3483                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3484                                 save_state(dc, cpu_cond);
3485                                 gen_op_next_insn();
3486                                 tcg_gen_exit_tb(0);
3487                                 dc->is_br = 1;
3488                                 break;
3489                             case 0xf: /* V9 sir, nop if user */
3490 #if !defined(CONFIG_USER_ONLY)
3491                                 if (supervisor(dc))
3492                                     ; // XXX
3493 #endif
3494                                 break;
3495                             case 0x13: /* Graphics Status */
3496                                 if (gen_trap_ifnofpu(dc, cpu_cond))
3497                                     goto jmp_insn;
3498                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3499                                 break;
3500                             case 0x14: /* Softint set */
3501                                 if (!supervisor(dc))
3502                                     goto illegal_insn;
3503                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3504                                 gen_helper_set_softint(cpu_tmp64);
3505                                 break;
3506                             case 0x15: /* Softint clear */
3507                                 if (!supervisor(dc))
3508                                     goto illegal_insn;
3509                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3510                                 gen_helper_clear_softint(cpu_tmp64);
3511                                 break;
3512                             case 0x16: /* Softint write */
3513                                 if (!supervisor(dc))
3514                                     goto illegal_insn;
3515                                 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3516                                 gen_helper_write_softint(cpu_tmp64);
3517                                 break;
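                                 /* Writes to the tick/stick compare registers
                                    also pass the new limit to the matching
                                    timer via gen_helper_tick_set_limit(). */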
3518                             case 0x17: /* Tick compare */
3519 #if !defined(CONFIG_USER_ONLY)
3520                                 if (!supervisor(dc))
3521                                     goto illegal_insn;
3522 #endif
3523                                 {
3524                                     TCGv_ptr r_tickptr;
3525
3526                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3527                                                    cpu_src2);
3528                                     r_tickptr = tcg_temp_new_ptr();
3529                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3530                                                    offsetof(CPUState, tick));
3531                                     gen_helper_tick_set_limit(r_tickptr,
3532                                                               cpu_tick_cmpr);
3533                                     tcg_temp_free_ptr(r_tickptr);
3534                                 }
3535                                 break;
3536                             case 0x18: /* System tick */
3537 #if !defined(CONFIG_USER_ONLY)
3538                                 if (!supervisor(dc))
3539                                     goto illegal_insn;
3540 #endif
3541                                 {
3542                                     TCGv_ptr r_tickptr;
3543
3544                                     tcg_gen_xor_tl(cpu_dst, cpu_src1,
3545                                                    cpu_src2);
3546                                     r_tickptr = tcg_temp_new_ptr();
3547                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3548                                                    offsetof(CPUState, stick));
3549                                     gen_helper_tick_set_count(r_tickptr,
3550                                                               cpu_dst);
3551                                     tcg_temp_free_ptr(r_tickptr);
3552                                 }
3553                                 break;
3554                             case 0x19: /* System tick compare */
3555 #if !defined(CONFIG_USER_ONLY)
3556                                 if (!supervisor(dc))
3557                                     goto illegal_insn;
3558 #endif
3559                                 {
3560                                     TCGv_ptr r_tickptr;
3561
3562                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3563                                                    cpu_src2);
3564                                     r_tickptr = tcg_temp_new_ptr();
3565                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3566                                                    offsetof(CPUState, stick));
3567                                     gen_helper_tick_set_limit(r_tickptr,
3568                                                               cpu_stick_cmpr);
3569                                     tcg_temp_free_ptr(r_tickptr);
3570                                 }
3571                                 break;
3572
3573                             case 0x10: /* Performance Control */
3574                             case 0x11: /* Performance Instrumentation
3575                                           Counter */
3576                             case 0x12: /* Dispatch Control */
3577 #endif
3578                             default:
3579                                 goto illegal_insn;
3580                             }
3581                         }
3582                         break;
3583 #if !defined(CONFIG_USER_ONLY)
3584                     case 0x31: /* wrpsr, V9 saved, restored */
3585                         {
3586                             if (!supervisor(dc))
3587                                 goto priv_insn;
3588 #ifdef TARGET_SPARC64
3589                             switch (rd) {
3590                             case 0:
3591                                 gen_helper_saved();
3592                                 break;
3593                             case 1:
3594                                 gen_helper_restored();
3595                                 break;
3596                             case 2: /* UA2005 allclean */
3597                             case 3: /* UA2005 otherw */
3598                             case 4: /* UA2005 normalw */
3599                             case 5: /* UA2005 invalw */
3600                                 // XXX
3601                             default:
3602                                 goto illegal_insn;
3603                             }
3604 #else
3605                             tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3606                             gen_helper_wrpsr(cpu_dst);
3607                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3608                             dc->cc_op = CC_OP_FLAGS;
3609                             save_state(dc, cpu_cond);
3610                             gen_op_next_insn();
3611                             tcg_gen_exit_tb(0);
3612                             dc->is_br = 1;
3613 #endif
3614                         }
3615                         break;
3616                     case 0x32: /* wrwim, V9 wrpr */
3617                         {
3618                             if (!supervisor(dc))
3619                                 goto priv_insn;
3620                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3621 #ifdef TARGET_SPARC64
3622                             switch (rd) {
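                                 /* V9 wrpr: rd names the privileged register;
                                    the per-trap-level registers (tpc, tnpc,
                                    tstate, tt) are reached through the tsptr
                                    pointer of the current trap level. */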
3623                             case 0: // tpc
3624                                 {
3625                                     TCGv_ptr r_tsptr;
3626
3627                                     r_tsptr = tcg_temp_new_ptr();
3628                                     tcg_gen_ld_ptr(r_tsptr, cpu_env,
3629                                                    offsetof(CPUState, tsptr));
3630                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3631                                                   offsetof(trap_state, tpc));
3632                                     tcg_temp_free_ptr(r_tsptr);
3633                                 }
3634                                 break;
3635                             case 1: // tnpc
3636                                 {
3637                                     TCGv_ptr r_tsptr;
3638
3639                                     r_tsptr = tcg_temp_new_ptr();
3640                                     tcg_gen_ld_ptr(r_tsptr, cpu_env,
3641                                                    offsetof(CPUState, tsptr));
3642                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3643                                                   offsetof(trap_state, tnpc));
3644                                     tcg_temp_free_ptr(r_tsptr);
3645                                 }
3646                                 break;
3647                             case 2: // tstate
3648                                 {
3649                                     TCGv_ptr r_tsptr;
3650
3651                                     r_tsptr = tcg_temp_new_ptr();
3652                                     tcg_gen_ld_ptr(r_tsptr, cpu_env,
3653                                                    offsetof(CPUState, tsptr));
3654                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3655                                                   offsetof(trap_state,
3656                                                            tstate));
3657                                     tcg_temp_free_ptr(r_tsptr);
3658                                 }
3659                                 break;
3660                             case 3: // tt
3661                                 {
3662                                     TCGv_ptr r_tsptr;
3663
3664                                     r_tsptr = tcg_temp_new_ptr();
3665                                     tcg_gen_ld_ptr(r_tsptr, cpu_env,
3666                                                    offsetof(CPUState, tsptr));
3667                                     tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3668                                     tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3669                                                    offsetof(trap_state, tt));
3670                                     tcg_temp_free_ptr(r_tsptr);
3671                                 }
3672                                 break;
3673                             case 4: // tick
3674                                 {
3675                                     TCGv_ptr r_tickptr;
3676
3677                                     r_tickptr = tcg_temp_new_ptr();
3678                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3679                                                    offsetof(CPUState, tick));
3680                                     gen_helper_tick_set_count(r_tickptr,
3681                                                               cpu_tmp0);
3682                                     tcg_temp_free_ptr(r_tickptr);
3683                                 }
3684                                 break;
3685                             case 5: // tba
3686                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3687                                 break;
3688                             case 6: // pstate
3689                                 save_state(dc, cpu_cond);
3690                                 gen_helper_wrpstate(cpu_tmp0);
3691                                 gen_op_next_insn();
3692                                 tcg_gen_exit_tb(0);
3693                                 dc->is_br = 1;
3694                                 break;
3695                             case 7: // tl
3696                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3697                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3698                                                offsetof(CPUSPARCState, tl));
3699                                 break;
3700                             case 8: // pil
3701                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3702                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3703                                                offsetof(CPUSPARCState,
3704                                                         psrpil));
3705                                 break;
3706                             case 9: // cwp
3707                                 gen_helper_wrcwp(cpu_tmp0);
3708                                 break;
3709                             case 10: // cansave
3710                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3711                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3712                                                offsetof(CPUSPARCState,
3713                                                         cansave));
3714                                 break;
3715                             case 11: // canrestore
3716                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3717                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3718                                                offsetof(CPUSPARCState,
3719                                                         canrestore));
3720                                 break;
3721                             case 12: // cleanwin
3722                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3723                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3724                                                offsetof(CPUSPARCState,
3725                                                         cleanwin));
3726                                 break;
3727                             case 13: // otherwin
3728                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3729                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3730                                                offsetof(CPUSPARCState,
3731                                                         otherwin));
3732                                 break;
3733                             case 14: // wstate
3734                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3735                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3736                                                offsetof(CPUSPARCState,
3737                                                         wstate));
3738                                 break;
3739                             case 16: // UA2005 gl
3740                                 CHECK_IU_FEATURE(dc, GL);
3741                                 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3742                                 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3743                                                offsetof(CPUSPARCState, gl));
3744                                 break;
3745                             case 26: // UA2005 strand status
3746                                 CHECK_IU_FEATURE(dc, HYPV);
3747                                 if (!hypervisor(dc))
3748                                     goto priv_insn;
3749                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3750                                 break;
3751                             default:
3752                                 goto illegal_insn;
3753                             }
3754 #else
3755                             tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3756                             if (dc->def->nwindows != 32)
3757                                 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3758                                                 (1 << dc->def->nwindows) - 1);
3759                             tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3760 #endif
3761                         }
3762                         break;
3763                     case 0x33: /* wrtbr, UA2005 wrhpr */
3764                         {
3765 #ifndef TARGET_SPARC64
3766                             if (!supervisor(dc))
3767                                 goto priv_insn;
3768                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3769 #else
3770                             CHECK_IU_FEATURE(dc, HYPV);
3771                             if (!hypervisor(dc))
3772                                 goto priv_insn;
3773                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3774                             switch (rd) {
3775                             case 0: // hpstate
3776                                 // XXX gen_op_wrhpstate();
3777                                 save_state(dc, cpu_cond);
3778                                 gen_op_next_insn();
3779                                 tcg_gen_exit_tb(0);
3780                                 dc->is_br = 1;
3781                                 break;
3782                             case 1: // htstate
3783                                 // XXX gen_op_wrhtstate();
3784                                 break;
3785                             case 3: // hintp
3786                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3787                                 break;
3788                             case 5: // htba
3789                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3790                                 break;
3791                             case 31: // hstick_cmpr
3792                                 {
3793                                     TCGv_ptr r_tickptr;
3794
3795                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3796                                     r_tickptr = tcg_temp_new_ptr();
3797                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
3798                                                    offsetof(CPUState, hstick));
3799                                     gen_helper_tick_set_limit(r_tickptr,
3800                                                               cpu_hstick_cmpr);
3801                                     tcg_temp_free_ptr(r_tickptr);
3802                                 }
3803                                 break;
3804                             case 6: // hver readonly
3805                             default:
3806                                 goto illegal_insn;
3807                             }
3808 #endif
3809                         }
3810                         break;
3811 #endif
3812 #ifdef TARGET_SPARC64
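                         /* movcc: instruction bit 18 chooses between the
                            integer condition codes (cc field 0 = %icc,
                            2 = %xcc) and %fcc0-%fcc3; the move is skipped by
                            branching when the condition is false. */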
3813                     case 0x2c: /* V9 movcc */
3814                         {
3815                             int cc = GET_FIELD_SP(insn, 11, 12);
3816                             int cond = GET_FIELD_SP(insn, 14, 17);
3817                             TCGv r_cond;
3818                             int l1;
3819
3820                             r_cond = tcg_temp_new();
3821                             if (insn & (1 << 18)) {
3822                                 if (cc == 0)
3823                                     gen_cond(r_cond, 0, cond, dc);
3824                                 else if (cc == 2)
3825                                     gen_cond(r_cond, 1, cond, dc);
3826                                 else
3827                                     goto illegal_insn;
3828                             } else {
3829                                 gen_fcond(r_cond, cc, cond);
3830                             }
3831
3832                             l1 = gen_new_label();
3833
3834                             tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3835                             if (IS_IMM) {       /* immediate */
3836                                 TCGv r_const;
3837
3838                                 simm = GET_FIELD_SPs(insn, 0, 10);
3839                                 r_const = tcg_const_tl(simm);
3840                                 gen_movl_TN_reg(rd, r_const);
3841                                 tcg_temp_free(r_const);
3842                             } else {
3843                                 rs2 = GET_FIELD_SP(insn, 0, 4);
3844                                 gen_movl_reg_TN(rs2, cpu_tmp0);
3845                                 gen_movl_TN_reg(rd, cpu_tmp0);
3846                             }
3847                             gen_set_label(l1);
3848                             tcg_temp_free(r_cond);
3849                             break;
3850                         }
3851                     case 0x2d: /* V9 sdivx */
3852                         gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3853                         gen_movl_TN_reg(rd, cpu_dst);
3854                         break;
3855                     case 0x2e: /* V9 popc */
3856                         {
3857                             cpu_src2 = get_src2(insn, cpu_src2);
3858                             gen_helper_popc(cpu_dst, cpu_src2);
3859                             gen_movl_TN_reg(rd, cpu_dst);
                                 break;
3860                         }
3861                     case 0x2f: /* V9 movr */
3862                         {
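                                 /* movr: test the contents of rs1 against zero
                                    (cond picks the comparison via
                                    gen_tcg_cond_reg) and conditionally copy
                                    the immediate or rs2 into rd. */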
3863                             int cond = GET_FIELD_SP(insn, 10, 12);
3864                             int l1;
3865
3866                             cpu_src1 = get_src1(insn, cpu_src1);
3867
3868                             l1 = gen_new_label();
3869
3870                             tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3871                                               cpu_src1, 0, l1);
3872                             if (IS_IMM) {       /* immediate */
3873                                 TCGv r_const;
3874
3875                                 simm = GET_FIELD_SPs(insn, 0, 9);
3876                                 r_const = tcg_const_tl(simm);
3877                                 gen_movl_TN_reg(rd, r_const);
3878                                 tcg_temp_free(r_const);
3879                             } else {
3880                                 rs2 = GET_FIELD_SP(insn, 0, 4);
3881                                 gen_movl_reg_TN(rs2, cpu_tmp0);
3882                                 gen_movl_TN_reg(rd, cpu_tmp0);
3883                             }
3884                             gen_set_label(l1);
3885                             break;
3886                         }
3887 #endif
3888                     default:
3889                         goto illegal_insn;
3890                     }
3891                 }
3892             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3893 #ifdef TARGET_SPARC64
3894                 int opf = GET_FIELD_SP(insn, 5, 13);
3895                 rs1 = GET_FIELD(insn, 13, 17);
3896                 rs2 = GET_FIELD(insn, 27, 31);
3897                 if (gen_trap_ifnofpu(dc, cpu_cond))
3898                     goto jmp_insn;
3899
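                     /* VIS: opf selects the operation.  The double-word
                        helpers below stage their operands in DT0/DT1 and
                        write the result back from DT0; the single-precision
                        forms operate on cpu_fpr[] directly. */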
3900                 switch (opf) {
3901                 case 0x000: /* VIS I edge8cc */
3902                 case 0x001: /* VIS II edge8n */
3903                 case 0x002: /* VIS I edge8lcc */
3904                 case 0x003: /* VIS II edge8ln */
3905                 case 0x004: /* VIS I edge16cc */
3906                 case 0x005: /* VIS II edge16n */
3907                 case 0x006: /* VIS I edge16lcc */
3908                 case 0x007: /* VIS II edge16ln */
3909                 case 0x008: /* VIS I edge32cc */
3910                 case 0x009: /* VIS II edge32n */
3911                 case 0x00a: /* VIS I edge32lcc */
3912                 case 0x00b: /* VIS II edge32ln */
3913                     // XXX
3914                     goto illegal_insn;
3915                 case 0x010: /* VIS I array8 */
3916                     CHECK_FPU_FEATURE(dc, VIS1);
3917                     cpu_src1 = get_src1(insn, cpu_src1);
3918                     gen_movl_reg_TN(rs2, cpu_src2);
3919                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3920                     gen_movl_TN_reg(rd, cpu_dst);
3921                     break;
3922                 case 0x012: /* VIS I array16 */
3923                     CHECK_FPU_FEATURE(dc, VIS1);
3924                     cpu_src1 = get_src1(insn, cpu_src1);
3925                     gen_movl_reg_TN(rs2, cpu_src2);
3926                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3927                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3928                     gen_movl_TN_reg(rd, cpu_dst);
3929                     break;
3930                 case 0x014: /* VIS I array32 */
3931                     CHECK_FPU_FEATURE(dc, VIS1);
3932                     cpu_src1 = get_src1(insn, cpu_src1);
3933                     gen_movl_reg_TN(rs2, cpu_src2);
3934                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3935                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3936                     gen_movl_TN_reg(rd, cpu_dst);
3937                     break;
3938                 case 0x018: /* VIS I alignaddr */
3939                     CHECK_FPU_FEATURE(dc, VIS1);
3940                     cpu_src1 = get_src1(insn, cpu_src1);
3941                     gen_movl_reg_TN(rs2, cpu_src2);
3942                     gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3943                     gen_movl_TN_reg(rd, cpu_dst);
3944                     break;
3945                 case 0x019: /* VIS II bmask */
3946                 case 0x01a: /* VIS I alignaddrl */
3947                     // XXX
3948                     goto illegal_insn;
3949                 case 0x020: /* VIS I fcmple16 */
3950                     CHECK_FPU_FEATURE(dc, VIS1);
3951                     gen_op_load_fpr_DT0(DFPREG(rs1));
3952                     gen_op_load_fpr_DT1(DFPREG(rs2));
3953                     gen_helper_fcmple16();
3954                     gen_op_store_DT0_fpr(DFPREG(rd));
3955                     break;
3956                 case 0x022: /* VIS I fcmpne16 */
3957                     CHECK_FPU_FEATURE(dc, VIS1);
3958                     gen_op_load_fpr_DT0(DFPREG(rs1));
3959                     gen_op_load_fpr_DT1(DFPREG(rs2));
3960                     gen_helper_fcmpne16();
3961                     gen_op_store_DT0_fpr(DFPREG(rd));
3962                     break;
3963                 case 0x024: /* VIS I fcmple32 */
3964                     CHECK_FPU_FEATURE(dc, VIS1);
3965                     gen_op_load_fpr_DT0(DFPREG(rs1));
3966                     gen_op_load_fpr_DT1(DFPREG(rs2));
3967                     gen_helper_fcmple32();
3968                     gen_op_store_DT0_fpr(DFPREG(rd));
3969                     break;
3970                 case 0x026: /* VIS I fcmpne32 */
3971                     CHECK_FPU_FEATURE(dc, VIS1);
3972                     gen_op_load_fpr_DT0(DFPREG(rs1));
3973                     gen_op_load_fpr_DT1(DFPREG(rs2));
3974                     gen_helper_fcmpne32();
3975                     gen_op_store_DT0_fpr(DFPREG(rd));
3976                     break;
3977                 case 0x028: /* VIS I fcmpgt16 */
3978                     CHECK_FPU_FEATURE(dc, VIS1);
3979                     gen_op_load_fpr_DT0(DFPREG(rs1));
3980                     gen_op_load_fpr_DT1(DFPREG(rs2));
3981                     gen_helper_fcmpgt16();
3982                     gen_op_store_DT0_fpr(DFPREG(rd));
3983                     break;
3984                 case 0x02a: /* VIS I fcmpeq16 */
3985                     CHECK_FPU_FEATURE(dc, VIS1);
3986                     gen_op_load_fpr_DT0(DFPREG(rs1));
3987                     gen_op_load_fpr_DT1(DFPREG(rs2));
3988                     gen_helper_fcmpeq16();
3989                     gen_op_store_DT0_fpr(DFPREG(rd));
3990                     break;
3991                 case 0x02c: /* VIS I fcmpgt32 */
3992                     CHECK_FPU_FEATURE(dc, VIS1);
3993                     gen_op_load_fpr_DT0(DFPREG(rs1));
3994                     gen_op_load_fpr_DT1(DFPREG(rs2));
3995                     gen_helper_fcmpgt32();
3996                     gen_op_store_DT0_fpr(DFPREG(rd));
3997                     break;
3998                 case 0x02e: /* VIS I fcmpeq32 */
3999                     CHECK_FPU_FEATURE(dc, VIS1);
4000                     gen_op_load_fpr_DT0(DFPREG(rs1));
4001                     gen_op_load_fpr_DT1(DFPREG(rs2));
4002                     gen_helper_fcmpeq32();
4003                     gen_op_store_DT0_fpr(DFPREG(rd));
4004                     break;
4005                 case 0x031: /* VIS I fmul8x16 */
4006                     CHECK_FPU_FEATURE(dc, VIS1);
4007                     gen_op_load_fpr_DT0(DFPREG(rs1));
4008                     gen_op_load_fpr_DT1(DFPREG(rs2));
4009                     gen_helper_fmul8x16();
4010                     gen_op_store_DT0_fpr(DFPREG(rd));
4011                     break;
4012                 case 0x033: /* VIS I fmul8x16au */
4013                     CHECK_FPU_FEATURE(dc, VIS1);
4014                     gen_op_load_fpr_DT0(DFPREG(rs1));
4015                     gen_op_load_fpr_DT1(DFPREG(rs2));
4016                     gen_helper_fmul8x16au();
4017                     gen_op_store_DT0_fpr(DFPREG(rd));
4018                     break;
4019                 case 0x035: /* VIS I fmul8x16al */
4020                     CHECK_FPU_FEATURE(dc, VIS1);
4021                     gen_op_load_fpr_DT0(DFPREG(rs1));
4022                     gen_op_load_fpr_DT1(DFPREG(rs2));
4023                     gen_helper_fmul8x16al();
4024                     gen_op_store_DT0_fpr(DFPREG(rd));
4025                     break;
4026                 case 0x036: /* VIS I fmul8sux16 */
4027                     CHECK_FPU_FEATURE(dc, VIS1);
4028                     gen_op_load_fpr_DT0(DFPREG(rs1));
4029                     gen_op_load_fpr_DT1(DFPREG(rs2));
4030                     gen_helper_fmul8sux16();
4031                     gen_op_store_DT0_fpr(DFPREG(rd));
4032                     break;
4033                 case 0x037: /* VIS I fmul8ulx16 */
4034                     CHECK_FPU_FEATURE(dc, VIS1);
4035                     gen_op_load_fpr_DT0(DFPREG(rs1));
4036                     gen_op_load_fpr_DT1(DFPREG(rs2));
4037                     gen_helper_fmul8ulx16();
4038                     gen_op_store_DT0_fpr(DFPREG(rd));
4039                     break;
4040                 case 0x038: /* VIS I fmuld8sux16 */
4041                     CHECK_FPU_FEATURE(dc, VIS1);
4042                     gen_op_load_fpr_DT0(DFPREG(rs1));
4043                     gen_op_load_fpr_DT1(DFPREG(rs2));
4044                     gen_helper_fmuld8sux16();
4045                     gen_op_store_DT0_fpr(DFPREG(rd));
4046                     break;
4047                 case 0x039: /* VIS I fmuld8ulx16 */
4048                     CHECK_FPU_FEATURE(dc, VIS1);
4049                     gen_op_load_fpr_DT0(DFPREG(rs1));
4050                     gen_op_load_fpr_DT1(DFPREG(rs2));
4051                     gen_helper_fmuld8ulx16();
4052                     gen_op_store_DT0_fpr(DFPREG(rd));
4053                     break;
4054                 case 0x03a: /* VIS I fpack32 */
4055                 case 0x03b: /* VIS I fpack16 */
4056                 case 0x03d: /* VIS I fpackfix */
4057                 case 0x03e: /* VIS I pdist */
4058                     // XXX
4059                     goto illegal_insn;
4060                 case 0x048: /* VIS I faligndata */
4061                     CHECK_FPU_FEATURE(dc, VIS1);
4062                     gen_op_load_fpr_DT0(DFPREG(rs1));
4063                     gen_op_load_fpr_DT1(DFPREG(rs2));
4064                     gen_helper_faligndata();
4065                     gen_op_store_DT0_fpr(DFPREG(rd));
4066                     break;
4067                 case 0x04b: /* VIS I fpmerge */
4068                     CHECK_FPU_FEATURE(dc, VIS1);
4069                     gen_op_load_fpr_DT0(DFPREG(rs1));
4070                     gen_op_load_fpr_DT1(DFPREG(rs2));
4071                     gen_helper_fpmerge();
4072                     gen_op_store_DT0_fpr(DFPREG(rd));
4073                     break;
4074                 case 0x04c: /* VIS II bshuffle */
4075                     // XXX: not implemented yet, trap as illegal for now
4076                     goto illegal_insn;
4077                 case 0x04d: /* VIS I fexpand */
4078                     CHECK_FPU_FEATURE(dc, VIS1);
4079                     gen_op_load_fpr_DT0(DFPREG(rs1));
4080                     gen_op_load_fpr_DT1(DFPREG(rs2));
4081                     gen_helper_fexpand();
4082                     gen_op_store_DT0_fpr(DFPREG(rd));
4083                     break;
4084                 case 0x050: /* VIS I fpadd16 */
4085                     CHECK_FPU_FEATURE(dc, VIS1);
4086                     gen_op_load_fpr_DT0(DFPREG(rs1));
4087                     gen_op_load_fpr_DT1(DFPREG(rs2));
4088                     gen_helper_fpadd16();
4089                     gen_op_store_DT0_fpr(DFPREG(rd));
4090                     break;
4091                 case 0x051: /* VIS I fpadd16s */
4092                     CHECK_FPU_FEATURE(dc, VIS1);
4093                     gen_helper_fpadd16s(cpu_fpr[rd],
4094                                         cpu_fpr[rs1], cpu_fpr[rs2]);
4095                     break;
4096                 case 0x052: /* VIS I fpadd32 */
4097                     CHECK_FPU_FEATURE(dc, VIS1);
4098                     gen_op_load_fpr_DT0(DFPREG(rs1));
4099                     gen_op_load_fpr_DT1(DFPREG(rs2));
4100                     gen_helper_fpadd32();
4101                     gen_op_store_DT0_fpr(DFPREG(rd));
4102                     break;
4103                 case 0x053: /* VIS I fpadd32s */
4104                     CHECK_FPU_FEATURE(dc, VIS1);
4105                     gen_helper_fpadd32s(cpu_fpr[rd],
4106                                         cpu_fpr[rs1], cpu_fpr[rs2]);
4107                     break;
4108                 case 0x054: /* VIS I fpsub16 */
4109                     CHECK_FPU_FEATURE(dc, VIS1);
4110                     gen_op_load_fpr_DT0(DFPREG(rs1));
4111                     gen_op_load_fpr_DT1(DFPREG(rs2));
4112                     gen_helper_fpsub16();
4113                     gen_op_store_DT0_fpr(DFPREG(rd));
4114                     break;
4115                 case 0x055: /* VIS I fpsub16s */
4116                     CHECK_FPU_FEATURE(dc, VIS1);
4117                     gen_helper_fpsub16s(cpu_fpr[rd],
4118                                         cpu_fpr[rs1], cpu_fpr[rs2]);
4119                     break;
4120                 case 0x056: /* VIS I fpsub32 */
4121                     CHECK_FPU_FEATURE(dc, VIS1);
4122                     gen_op_load_fpr_DT0(DFPREG(rs1));
4123                     gen_op_load_fpr_DT1(DFPREG(rs2));
4124                     gen_helper_fpsub32();
4125                     gen_op_store_DT0_fpr(DFPREG(rd));
4126                     break;
4127                 case 0x057: /* VIS I fpsub32s */
4128                     CHECK_FPU_FEATURE(dc, VIS1);
4129                     gen_helper_fpsub32s(cpu_fpr[rd],
4130                                         cpu_fpr[rs1], cpu_fpr[rs2]);
4131                     break;
4132                 case 0x060: /* VIS I fzero */
4133                     CHECK_FPU_FEATURE(dc, VIS1);
4134                     tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
4135                     tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
4136                     break;
4137                 case 0x061: /* VIS I fzeros */
4138                     CHECK_FPU_FEATURE(dc, VIS1);
4139                     tcg_gen_movi_i32(cpu_fpr[rd], 0);
4140                     break;
4141                 case 0x062: /* VIS I fnor */
4142                     CHECK_FPU_FEATURE(dc, VIS1);
4143                     tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4144                                     cpu_fpr[DFPREG(rs2)]);
4145                     tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs1) + 1],
4146                                     cpu_fpr[DFPREG(rs2) + 1]);
4147                     break;
4148                 case 0x063: /* VIS I fnors */
4149                     CHECK_FPU_FEATURE(dc, VIS1);
4150                     tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4151                     break;
4152                 case 0x064: /* VIS I fandnot2 */
4153                     CHECK_FPU_FEATURE(dc, VIS1);
4154                     tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4155                                      cpu_fpr[DFPREG(rs2)]);
4156                     tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4157                                      cpu_fpr[DFPREG(rs1) + 1],
4158                                      cpu_fpr[DFPREG(rs2) + 1]);
4159                     break;
4160                 case 0x065: /* VIS I fandnot2s */
4161                     CHECK_FPU_FEATURE(dc, VIS1);
4162                     tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4163                     break;
4164                 case 0x066: /* VIS I fnot2 */
4165                     CHECK_FPU_FEATURE(dc, VIS1);
4166                     tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4167                     tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4168                                     cpu_fpr[DFPREG(rs2) + 1]);
4169                     break;
4170                 case 0x067: /* VIS I fnot2s */
4171                     CHECK_FPU_FEATURE(dc, VIS1);
4172                     tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4173                     break;
4174                 case 0x068: /* VIS I fandnot1 */
4175                     CHECK_FPU_FEATURE(dc, VIS1);
4176                     tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4177                                      cpu_fpr[DFPREG(rs1)]);
4178                     tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4179                                      cpu_fpr[DFPREG(rs2) + 1],
4180                                      cpu_fpr[DFPREG(rs1) + 1]);
4181                     break;
4182                 case 0x069: /* VIS I fandnot1s */
4183                     CHECK_FPU_FEATURE(dc, VIS1);
4184                     tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4185                     break;
4186                 case 0x06a: /* VIS I fnot1 */
4187                     CHECK_FPU_FEATURE(dc, VIS1);
4188                     tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4189                     tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4190                                     cpu_fpr[DFPREG(rs1) + 1]);
4191                     break;
4192                 case 0x06b: /* VIS I fnot1s */
4193                     CHECK_FPU_FEATURE(dc, VIS1);
4194                     tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4195                     break;
4196                 case 0x06c: /* VIS I fxor */
4197                     CHECK_FPU_FEATURE(dc, VIS1);
4198                     tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4199                                     cpu_fpr[DFPREG(rs2)]);
4200                     tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4201                                     cpu_fpr[DFPREG(rs1) + 1],
4202                                     cpu_fpr[DFPREG(rs2) + 1]);
4203                     break;
4204                 case 0x06d: /* VIS I fxors */
4205                     CHECK_FPU_FEATURE(dc, VIS1);
4206                     tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4207                     break;
4208                 case 0x06e: /* VIS I fnand */
4209                     CHECK_FPU_FEATURE(dc, VIS1);
4210                     tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4211                                      cpu_fpr[DFPREG(rs2)]);
4212                     tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs1) + 1],
4213                                      cpu_fpr[DFPREG(rs2) + 1]);
4214                     break;
4215                 case 0x06f: /* VIS I fnands */
4216                     CHECK_FPU_FEATURE(dc, VIS1);
4217                     tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4218                     break;
4219                 case 0x070: /* VIS I fand */
4220                     CHECK_FPU_FEATURE(dc, VIS1);
4221                     tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4222                                     cpu_fpr[DFPREG(rs2)]);
4223                     tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4224                                     cpu_fpr[DFPREG(rs1) + 1],
4225                                     cpu_fpr[DFPREG(rs2) + 1]);
4226                     break;
4227                 case 0x071: /* VIS I fands */
4228                     CHECK_FPU_FEATURE(dc, VIS1);
4229                     tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4230                     break;
4231                 case 0x072: /* VIS I fxnor */
4232                     CHECK_FPU_FEATURE(dc, VIS1);
4233                     tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4234                     tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4235                                     cpu_fpr[DFPREG(rs1)]);
4236                     tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4237                     tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4238                                     cpu_fpr[DFPREG(rs1) + 1]);
4239                     break;
4240                 case 0x073: /* VIS I fxnors */
4241                     CHECK_FPU_FEATURE(dc, VIS1);
4242                     tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4243                     tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4244                     break;
4245                 case 0x074: /* VIS I fsrc1 */
4246                     CHECK_FPU_FEATURE(dc, VIS1);
4247                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4248                     tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4249                                     cpu_fpr[DFPREG(rs1) + 1]);
4250                     break;
4251                 case 0x075: /* VIS I fsrc1s */
4252                     CHECK_FPU_FEATURE(dc, VIS1);
4253                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4254                     break;
4255                 case 0x076: /* VIS I fornot2 */
4256                     CHECK_FPU_FEATURE(dc, VIS1);
4257                     tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4258                                     cpu_fpr[DFPREG(rs2)]);
4259                     tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4260                                     cpu_fpr[DFPREG(rs1) + 1],
4261                                     cpu_fpr[DFPREG(rs2) + 1]);
4262                     break;
4263                 case 0x077: /* VIS I fornot2s */
4264                     CHECK_FPU_FEATURE(dc, VIS1);
4265                     tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4266                     break;
4267                 case 0x078: /* VIS I fsrc2 */
4268                     CHECK_FPU_FEATURE(dc, VIS1);
4269                     gen_op_load_fpr_DT0(DFPREG(rs2));
4270                     gen_op_store_DT0_fpr(DFPREG(rd));
4271                     break;
4272                 case 0x079: /* VIS I fsrc2s */
4273                     CHECK_FPU_FEATURE(dc, VIS1);
4274                     tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4275                     break;
4276                 case 0x07a: /* VIS I fornot1 */
4277                     CHECK_FPU_FEATURE(dc, VIS1);
4278                     tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4279                                     cpu_fpr[DFPREG(rs1)]);
4280                     tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4281                                     cpu_fpr[DFPREG(rs2) + 1],
4282                                     cpu_fpr[DFPREG(rs1) + 1]);
4283                     break;
4284                 case 0x07b: /* VIS I fornot1s */
4285                     CHECK_FPU_FEATURE(dc, VIS1);
4286                     tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4287                     break;
4288                 case 0x07c: /* VIS I for */
4289                     CHECK_FPU_FEATURE(dc, VIS1);
4290                     tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4291                                    cpu_fpr[DFPREG(rs2)]);
4292                     tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4293                                    cpu_fpr[DFPREG(rs1) + 1],
4294                                    cpu_fpr[DFPREG(rs2) + 1]);
4295                     break;
4296                 case 0x07d: /* VIS I fors */
4297                     CHECK_FPU_FEATURE(dc, VIS1);
4298                     tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4299                     break;
4300                 case 0x07e: /* VIS I fone */
4301                     CHECK_FPU_FEATURE(dc, VIS1);
4302                     tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4303                     tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4304                     break;
4305                 case 0x07f: /* VIS I fones */
4306                     CHECK_FPU_FEATURE(dc, VIS1);
4307                     tcg_gen_movi_i32(cpu_fpr[rd], -1);
4308                     break;
4309                 case 0x080: /* VIS I shutdown */
4310                 case 0x081: /* VIS II siam */
4311                     // XXX: not implemented yet, trap as illegal for now
4312                     goto illegal_insn;
4313                 default:
4314                     goto illegal_insn;
4315                 }
4316 #else
4317                 goto ncp_insn;
4318 #endif
4319             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4320 #ifdef TARGET_SPARC64
4321                 goto illegal_insn;
4322 #else
4323                 goto ncp_insn;
4324 #endif
4325 #ifdef TARGET_SPARC64
4326             } else if (xop == 0x39) { /* V9 return */
4327                 TCGv_i32 r_const;
4328
4329                 save_state(dc, cpu_cond);
4330                 cpu_src1 = get_src1(insn, cpu_src1);
4331                 if (IS_IMM) {   /* immediate */
4332                     simm = GET_FIELDs(insn, 19, 31);
4333                     tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4334                 } else {                /* register */
4335                     rs2 = GET_FIELD(insn, 27, 31);
4336                     if (rs2) {
4337                         gen_movl_reg_TN(rs2, cpu_src2);
4338                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4339                     } else
4340                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
4341                 }
4342                 gen_helper_restore();
4343                 gen_mov_pc_npc(dc, cpu_cond);
4344                 r_const = tcg_const_i32(3);
4345                 gen_helper_check_align(cpu_dst, r_const);
4346                 tcg_temp_free_i32(r_const);
4347                 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4348                 dc->npc = DYNAMIC_PC;
4349                 goto jmp_insn;
4350 #endif
4351             } else {
4352                 cpu_src1 = get_src1(insn, cpu_src1);
4353                 if (IS_IMM) {   /* immediate */
4354                     simm = GET_FIELDs(insn, 19, 31);
4355                     tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4356                 } else {                /* register */
4357                     rs2 = GET_FIELD(insn, 27, 31);
4358                     if (rs2) {
4359                         gen_movl_reg_TN(rs2, cpu_src2);
4360                         tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4361                     } else
4362                         tcg_gen_mov_tl(cpu_dst, cpu_src1);
4363                 }
4364                 switch (xop) {
4365                 case 0x38:      /* jmpl */
4366                     {
4367                         TCGv r_pc;
4368                         TCGv_i32 r_const;
4369
4370                         r_pc = tcg_const_tl(dc->pc);
4371                         gen_movl_TN_reg(rd, r_pc);
4372                         tcg_temp_free(r_pc);
4373                         gen_mov_pc_npc(dc, cpu_cond);
4374                         r_const = tcg_const_i32(3);
4375                         gen_helper_check_align(cpu_dst, r_const);
4376                         tcg_temp_free_i32(r_const);
4377                         tcg_gen_mov_tl(cpu_npc, cpu_dst);
4378                         dc->npc = DYNAMIC_PC;
4379                     }
4380                     goto jmp_insn;
4381 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4382                 case 0x39:      /* rett, V9 return */
4383                     {
4384                         TCGv_i32 r_const;
4385
4386                         if (!supervisor(dc))
4387                             goto priv_insn;
4388                         gen_mov_pc_npc(dc, cpu_cond);
4389                         r_const = tcg_const_i32(3);
4390                         gen_helper_check_align(cpu_dst, r_const);
4391                         tcg_temp_free_i32(r_const);
4392                         tcg_gen_mov_tl(cpu_npc, cpu_dst);
4393                         dc->npc = DYNAMIC_PC;
4394                         gen_helper_rett();
4395                     }
4396                     goto jmp_insn;
4397 #endif
4398                 case 0x3b: /* flush */
4399                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4400                         goto unimp_flush;
4401                     gen_helper_flush(cpu_dst);
4402                     break;
4403                 case 0x3c:      /* save */
4404                     save_state(dc, cpu_cond);
4405                     gen_helper_save();
4406                     gen_movl_TN_reg(rd, cpu_dst);
4407                     break;
4408                 case 0x3d:      /* restore */
4409                     save_state(dc, cpu_cond);
4410                     gen_helper_restore();
4411                     gen_movl_TN_reg(rd, cpu_dst);
4412                     break;
4413 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4414                 case 0x3e:      /* V9 done/retry */
4415                     {
4416                         switch (rd) {
4417                         case 0:
4418                             if (!supervisor(dc))
4419                                 goto priv_insn;
4420                             dc->npc = DYNAMIC_PC;
4421                             dc->pc = DYNAMIC_PC;
4422                             gen_helper_done();
4423                             goto jmp_insn;
4424                         case 1:
4425                             if (!supervisor(dc))
4426                                 goto priv_insn;
4427                             dc->npc = DYNAMIC_PC;
4428                             dc->pc = DYNAMIC_PC;
4429                             gen_helper_retry();
4430                             goto jmp_insn;
4431                         default:
4432                             goto illegal_insn;
4433                         }
4434                     }
4435                     break;
4436 #endif
4437                 default:
4438                     goto illegal_insn;
4439                 }
4440             }
4441             break;
4442         }
4443         break;
4444     case 3:                     /* load/store instructions */
4445         {
4446             unsigned int xop = GET_FIELD(insn, 7, 12);
4447
4448             cpu_src1 = get_src1(insn, cpu_src1);
4449             if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4450                 rs2 = GET_FIELD(insn, 27, 31);
4451                 gen_movl_reg_TN(rs2, cpu_src2);
4452                 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4453             } else if (IS_IMM) {     /* immediate */
4454                 simm = GET_FIELDs(insn, 19, 31);
4455                 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4456             } else {            /* register */
4457                 rs2 = GET_FIELD(insn, 27, 31);
4458                 if (rs2 != 0) {
4459                     gen_movl_reg_TN(rs2, cpu_src2);
4460                     tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4461                 } else
4462                     tcg_gen_mov_tl(cpu_addr, cpu_src1);
4463             }
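            /* cpu_addr now holds the effective address: rs1 + simm13,
               rs1 + rs2, or rs1 alone for casa/casxa */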
4464             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4465                 (xop > 0x17 && xop <= 0x1d ) ||
4466                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
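                /* load-type instructions: most leave their result in cpu_val
                   for the write-back after the switch */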
4467                 switch (xop) {
4468                 case 0x0:       /* ld, V9 lduw, load unsigned word */
4469                     gen_address_mask(dc, cpu_addr);
4470                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4471                     break;
4472                 case 0x1:       /* ldub, load unsigned byte */
4473                     gen_address_mask(dc, cpu_addr);
4474                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4475                     break;
4476                 case 0x2:       /* lduh, load unsigned halfword */
4477                     gen_address_mask(dc, cpu_addr);
4478                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4479                     break;
4480                 case 0x3:       /* ldd, load double word */
4481                     if (rd & 1)
4482                         goto illegal_insn;
4483                     else {
4484                         TCGv_i32 r_const;
4485
4486                         save_state(dc, cpu_cond);
4487                         r_const = tcg_const_i32(7);
4488                         gen_helper_check_align(cpu_addr, r_const); // XXX remove
4489                         tcg_temp_free_i32(r_const);
4490                         gen_address_mask(dc, cpu_addr);
4491                         tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4492                         tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4493                         tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4494                         gen_movl_TN_reg(rd + 1, cpu_tmp0);
4495                         tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4496                         tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4497                         tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4498                     }
4499                     break;
4500                 case 0x9:       /* ldsb, load signed byte */
4501                     gen_address_mask(dc, cpu_addr);
4502                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4503                     break;
4504                 case 0xa:       /* ldsh, load signed halfword */
4505                     gen_address_mask(dc, cpu_addr);
4506                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4507                     break;
4508                 case 0xd:       /* ldstub -- XXX: should be atomic */
4509                     {
4510                         TCGv r_const;
4511
4512                         gen_address_mask(dc, cpu_addr);
4513                         tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4514                         r_const = tcg_const_tl(0xff);
4515                         tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4516                         tcg_temp_free(r_const);
4517                     }
4518                     break;
4519                 case 0x0f:      /* swap, swap register with memory -- XXX:
4520                                    should also be atomic */
4521                     CHECK_IU_FEATURE(dc, SWAP);
4522                     gen_movl_reg_TN(rd, cpu_val);
4523                     gen_address_mask(dc, cpu_addr);
4524                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4525                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4526                     tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4527                     break;
4528 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
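                /* alternate space (ASI) loads: on sparc32 these require
                   supervisor mode and a register-specified ASI */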
4529                 case 0x10:      /* lda, V9 lduwa, load word alternate */
4530 #ifndef TARGET_SPARC64
4531                     if (IS_IMM)
4532                         goto illegal_insn;
4533                     if (!supervisor(dc))
4534                         goto priv_insn;
4535 #endif
4536                     save_state(dc, cpu_cond);
4537                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4538                     break;
4539                 case 0x11:      /* lduba, load unsigned byte alternate */
4540 #ifndef TARGET_SPARC64
4541                     if (IS_IMM)
4542                         goto illegal_insn;
4543                     if (!supervisor(dc))
4544                         goto priv_insn;
4545 #endif
4546                     save_state(dc, cpu_cond);
4547                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4548                     break;
4549                 case 0x12:      /* lduha, load unsigned halfword alternate */
4550 #ifndef TARGET_SPARC64
4551                     if (IS_IMM)
4552                         goto illegal_insn;
4553                     if (!supervisor(dc))
4554                         goto priv_insn;
4555 #endif
4556                     save_state(dc, cpu_cond);
4557                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4558                     break;
4559                 case 0x13:      /* ldda, load double word alternate */
4560 #ifndef TARGET_SPARC64
4561                     if (IS_IMM)
4562                         goto illegal_insn;
4563                     if (!supervisor(dc))
4564                         goto priv_insn;
4565 #endif
4566                     if (rd & 1)
4567                         goto illegal_insn;
4568                     save_state(dc, cpu_cond);
4569                     gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4570                     goto skip_move;
4571                 case 0x19:      /* ldsba, load signed byte alternate */
4572 #ifndef TARGET_SPARC64
4573                     if (IS_IMM)
4574                         goto illegal_insn;
4575                     if (!supervisor(dc))
4576                         goto priv_insn;
4577 #endif
4578                     save_state(dc, cpu_cond);
4579                     gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4580                     break;
4581                 case 0x1a:      /* ldsha, load signed halfword alternate */
4582 #ifndef TARGET_SPARC64
4583                     if (IS_IMM)
4584                         goto illegal_insn;
4585                     if (!supervisor(dc))
4586                         goto priv_insn;
4587 #endif
4588                     save_state(dc, cpu_cond);
4589                     gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4590                     break;
4591                 case 0x1d:      /* ldstuba -- XXX: should be atomic */
4592 #ifndef TARGET_SPARC64
4593                     if (IS_IMM)
4594                         goto illegal_insn;
4595                     if (!supervisor(dc))
4596                         goto priv_insn;
4597 #endif
4598                     save_state(dc, cpu_cond);
4599                     gen_ldstub_asi(cpu_val, cpu_addr, insn);
4600                     break;
4601                 case 0x1f:      /* swapa, swap reg with alternate memory -- XXX:
4602                                    should also be atomic */
4603                     CHECK_IU_FEATURE(dc, SWAP);
4604 #ifndef TARGET_SPARC64
4605                     if (IS_IMM)
4606                         goto illegal_insn;
4607                     if (!supervisor(dc))
4608                         goto priv_insn;
4609 #endif
4610                     save_state(dc, cpu_cond);
4611                     gen_movl_reg_TN(rd, cpu_val);
4612                     gen_swap_asi(cpu_val, cpu_addr, insn);
4613                     break;
4614
4615 #ifndef TARGET_SPARC64
4616                 case 0x30: /* ldc */
4617                 case 0x31: /* ldcsr */
4618                 case 0x33: /* lddc */
4619                     goto ncp_insn;
4620 #endif
4621 #endif
4622 #ifdef TARGET_SPARC64
4623                 case 0x08: /* V9 ldsw */
4624                     gen_address_mask(dc, cpu_addr);
4625                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4626                     break;
4627                 case 0x0b: /* V9 ldx */
4628                     gen_address_mask(dc, cpu_addr);
4629                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4630                     break;
4631                 case 0x18: /* V9 ldswa */
4632                     save_state(dc, cpu_cond);
4633                     gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4634                     break;
4635                 case 0x1b: /* V9 ldxa */
4636                     save_state(dc, cpu_cond);
4637                     gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4638                     break;
4639                 case 0x2d: /* V9 prefetch, no effect */
4640                     goto skip_move;
4641                 case 0x30: /* V9 ldfa */
4642                     save_state(dc, cpu_cond);
4643                     gen_ldf_asi(cpu_addr, insn, 4, rd);
4644                     goto skip_move;
4645                 case 0x33: /* V9 lddfa */
4646                     save_state(dc, cpu_cond);
4647                     gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4648                     goto skip_move;
4649                 case 0x3d: /* V9 prefetcha, no effect */
4650                     goto skip_move;
4651                 case 0x32: /* V9 ldqfa */
4652                     CHECK_FPU_FEATURE(dc, FLOAT128);
4653                     save_state(dc, cpu_cond);
4654                     gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4655                     goto skip_move;
4656 #endif
4657                 default:
4658                     goto illegal_insn;
4659                 }
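                /* write the loaded value back to rd; ldda and the FP/prefetch
                   ASI loads above bypass this via skip_move */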
4660                 gen_movl_TN_reg(rd, cpu_val);
4661 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4662             skip_move: ;
4663 #endif
4664             } else if (xop >= 0x20 && xop < 0x24) {
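                /* floating-point loads: ldf, ldfsr/ldxfsr, ldqf, lddf */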
4665                 if (gen_trap_ifnofpu(dc, cpu_cond))
4666                     goto jmp_insn;
4667                 save_state(dc, cpu_cond);
4668                 switch (xop) {
4669                 case 0x20:      /* ldf, load fpreg */
4670                     gen_address_mask(dc, cpu_addr);
4671                     tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4672                     tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4673                     break;
4674                 case 0x21:      /* ldfsr, V9 ldxfsr */
4675 #ifdef TARGET_SPARC64
4676                     gen_address_mask(dc, cpu_addr);
4677                     if (rd == 1) {
4678                         tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4679                         gen_helper_ldxfsr(cpu_tmp64);
4680                     } else
4681 #endif
4682                     {
4683                         /* 32-bit FSR load; also the rd != 1 path on sparc64 */
4684                         tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4685                         gen_helper_ldfsr(cpu_tmp32);
4686                     }
4687                     break;
4688                 case 0x22:      /* ldqf, load quad fpreg */
4689                     {
4690                         TCGv_i32 r_const;
4691
4692                         CHECK_FPU_FEATURE(dc, FLOAT128);
4693                         r_const = tcg_const_i32(dc->mem_idx);
4694                         gen_helper_ldqf(cpu_addr, r_const);
4695                         tcg_temp_free_i32(r_const);
4696                         gen_op_store_QT0_fpr(QFPREG(rd));
4697                     }
4698                     break;
4699                 case 0x23:      /* lddf, load double fpreg */
4700                     {
4701                         TCGv_i32 r_const;
4702
4703                         r_const = tcg_const_i32(dc->mem_idx);
4704                         gen_helper_lddf(cpu_addr, r_const);
4705                         tcg_temp_free_i32(r_const);
4706                         gen_op_store_DT0_fpr(DFPREG(rd));
4707                     }
4708                     break;
4709                 default:
4710                     goto illegal_insn;
4711                 }
4712             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4713                        xop == 0xe || xop == 0x1e) {
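                /* integer stores: the register value to store is first
                   copied into cpu_val */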
4714                 gen_movl_reg_TN(rd, cpu_val);
4715                 switch (xop) {
4716                 case 0x4: /* st, store word */
4717                     gen_address_mask(dc, cpu_addr);
4718                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4719                     break;
4720                 case 0x5: /* stb, store byte */
4721                     gen_address_mask(dc, cpu_addr);
4722                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4723                     break;
4724                 case 0x6: /* sth, store halfword */
4725                     gen_address_mask(dc, cpu_addr);
4726                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4727                     break;
4728                 case 0x7: /* std, store double word */
4729                     if (rd & 1)
4730                         goto illegal_insn;
4731                     else {
4732                         TCGv_i32 r_const;
4733
4734                         save_state(dc, cpu_cond);
4735                         gen_address_mask(dc, cpu_addr);
4736                         r_const = tcg_const_i32(7);
4737                         gen_helper_check_align(cpu_addr, r_const); // XXX remove
4738                         tcg_temp_free_i32(r_const);
4739                         gen_movl_reg_TN(rd + 1, cpu_tmp0);
4740                         tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4741                         tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4742                     }
4743                     break;
4744 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4745                 case 0x14: /* sta, V9 stwa, store word alternate */
4746 #ifndef TARGET_SPARC64
4747                     if (IS_IMM)
4748                         goto illegal_insn;
4749                     if (!supervisor(dc))
4750                         goto priv_insn;
4751 #endif
4752                     save_state(dc, cpu_cond);
4753                     gen_st_asi(cpu_val, cpu_addr, insn, 4);
4754                     break;
4755                 case 0x15: /* stba, store byte alternate */
4756 #ifndef TARGET_SPARC64
4757                     if (IS_IMM)
4758                         goto illegal_insn;
4759                     if (!supervisor(dc))
4760                         goto priv_insn;
4761 #endif
4762                     save_state(dc, cpu_cond);
4763                     gen_st_asi(cpu_val, cpu_addr, insn, 1);
4764                     break;
4765                 case 0x16: /* stha, store halfword alternate */
4766 #ifndef TARGET_SPARC64
4767                     if (IS_IMM)
4768                         goto illegal_insn;
4769                     if (!supervisor(dc))
4770                         goto priv_insn;
4771 #endif
4772                     save_state(dc, cpu_cond);
4773                     gen_st_asi(cpu_val, cpu_addr, insn, 2);
4774                     break;
4775                 case 0x17: /* stda, store double word alternate */
4776 #ifndef TARGET_SPARC64
4777                     if (IS_IMM)
4778                         goto illegal_insn;
4779                     if (!supervisor(dc))
4780                         goto priv_insn;
4781 #endif
4782                     if (rd & 1)
4783                         goto illegal_insn;
4784                     else {
4785                         save_state(dc, cpu_cond);
4786                         gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4787                     }
4788                     break;
4789 #endif
4790 #ifdef TARGET_SPARC64
4791                 case 0x0e: /* V9 stx */
4792                     gen_address_mask(dc, cpu_addr);
4793                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4794                     break;
4795                 case 0x1e: /* V9 stxa */
4796                     save_state(dc, cpu_cond);
4797                     gen_st_asi(cpu_val, cpu_addr, insn, 8);
4798                     break;
4799 #endif
4800                 default:
4801                     goto illegal_insn;
4802                 }
4803             } else if (xop > 0x23 && xop < 0x28) {
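                /* floating-point stores: stf, stfsr/stxfsr, stqf/stdfq, stdf */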
4804                 if (gen_trap_ifnofpu(dc, cpu_cond))
4805                     goto jmp_insn;
4806                 save_state(dc, cpu_cond);
4807                 switch (xop) {
4808                 case 0x24: /* stf, store fpreg */
4809                     gen_address_mask(dc, cpu_addr);
4810                     tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4811                     tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4812                     break;
4813                 case 0x25: /* stfsr, V9 stxfsr */
4814 #ifdef TARGET_SPARC64
4815                     gen_address_mask(dc, cpu_addr);
4816                     tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4817                     if (rd == 1)
4818                         tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4819                     else
4820                         tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4821 #else
4822                     tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4823                     tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4824 #endif
4825                     break;
4826                 case 0x26:
4827 #ifdef TARGET_SPARC64
4828                     /* V9 stqf, store quad fpreg */
4829                     {
4830                         TCGv_i32 r_const;
4831
4832                         CHECK_FPU_FEATURE(dc, FLOAT128);
4833                         gen_op_load_fpr_QT0(QFPREG(rd));
4834                         r_const = tcg_const_i32(dc->mem_idx);
4835                         gen_helper_stqf(cpu_addr, r_const);
4836                         tcg_temp_free_i32(r_const);
4837                     }
4838                     break;
4839 #else /* !TARGET_SPARC64 */
4840                     /* stdfq, store floating point queue */
4841 #if defined(CONFIG_USER_ONLY)
4842                     goto illegal_insn;
4843 #else
4844                     if (!supervisor(dc))
4845                         goto priv_insn;
4846                     if (gen_trap_ifnofpu(dc, cpu_cond))
4847                         goto jmp_insn;
4848                     goto nfq_insn;
4849 #endif
4850 #endif
4851                 case 0x27: /* stdf, store double fpreg */
4852                     {
4853                         TCGv_i32 r_const;
4854
4855                         gen_op_load_fpr_DT0(DFPREG(rd));
4856                         r_const = tcg_const_i32(dc->mem_idx);
4857                         gen_helper_stdf(cpu_addr, r_const);
4858                         tcg_temp_free_i32(r_const);
4859                     }
4860                     break;
4861                 default:
4862                     goto illegal_insn;
4863                 }
4864             } else if (xop > 0x33 && xop < 0x3f) {
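                /* ASI FP stores and casa/casxa on sparc64; coprocessor
                   stores trap on sparc32 */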
4865                 save_state(dc, cpu_cond);
4866                 switch (xop) {
4867 #ifdef TARGET_SPARC64
4868                 case 0x34: /* V9 stfa */
4869                     gen_stf_asi(cpu_addr, insn, 4, rd);
4870                     break;
4871                 case 0x36: /* V9 stqfa */
4872                     {
4873                         TCGv_i32 r_const;
4874
4875                         CHECK_FPU_FEATURE(dc, FLOAT128);
4876                         r_const = tcg_const_i32(7);
4877                         gen_helper_check_align(cpu_addr, r_const);
4878                         tcg_temp_free_i32(r_const);
4879                         gen_op_load_fpr_QT0(QFPREG(rd));
4880                         gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4881                     }
4882                     break;
4883                 case 0x37: /* V9 stdfa */
4884                     gen_op_load_fpr_DT0(DFPREG(rd));
4885                     gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4886                     break;
4887                 case 0x3c: /* V9 casa */
4888                     gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4889                     gen_movl_TN_reg(rd, cpu_val);
4890                     break;
4891                 case 0x3e: /* V9 casxa */
4892                     gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4893                     gen_movl_TN_reg(rd, cpu_val);
4894                     break;
4895 #else
4896                 case 0x34: /* stc */
4897                 case 0x35: /* stcsr */
4898                 case 0x36: /* stdcq */
4899                 case 0x37: /* stdc */
4900                     goto ncp_insn;
4901 #endif
4902                 default:
4903                     goto illegal_insn;
4904                 }
4905             } else
4906                 goto illegal_insn;
4907         }
4908         break;
4909     }
4910     /* default case for non-jump instructions: advance pc and npc */
4911     if (dc->npc == DYNAMIC_PC) {
4912         dc->pc = DYNAMIC_PC;
4913         gen_op_next_insn();
4914     } else if (dc->npc == JUMP_PC) {
4915         /* conditional branch with two known targets: generate it statically */
4916         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4917         dc->is_br = 1;
4918     } else {
4919         dc->pc = dc->npc;
4920         dc->npc = dc->npc + 4;
4921     }
4922  jmp_insn:
4923     return;
4924  illegal_insn:
4925     {
4926         TCGv_i32 r_const;
4927
4928         save_state(dc, cpu_cond);
4929         r_const = tcg_const_i32(TT_ILL_INSN);
4930         gen_helper_raise_exception(r_const);
4931         tcg_temp_free_i32(r_const);
4932         dc->is_br = 1;
4933     }
4934     return;
4935  unimp_flush:
4936     {
4937         TCGv_i32 r_const;
4938
4939         save_state(dc, cpu_cond);
4940         r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4941         gen_helper_raise_exception(r_const);
4942         tcg_temp_free_i32(r_const);
4943         dc->is_br = 1;
4944     }
4945     return;
4946 #if !defined(CONFIG_USER_ONLY)
4947  priv_insn:
4948     {
4949         TCGv_i32 r_const;
4950
4951         save_state(dc, cpu_cond);
4952         r_const = tcg_const_i32(TT_PRIV_INSN);
4953         gen_helper_raise_exception(r_const);
4954         tcg_temp_free_i32(r_const);
4955         dc->is_br = 1;
4956     }
4957     return;
4958 #endif
4959  nfpu_insn:
4960     save_state(dc, cpu_cond);
4961     gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4962     dc->is_br = 1;
4963     return;
4964 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4965  nfq_insn:
4966     save_state(dc, cpu_cond);
4967     gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4968     dc->is_br = 1;
4969     return;
4970 #endif
4971 #ifndef TARGET_SPARC64
4972  ncp_insn:
4973     {
4974         TCGv_i32 r_const;
4975
4976         save_state(dc, cpu_cond);
4977         r_const = tcg_const_i32(TT_NCP_INSN);
4978         gen_helper_raise_exception(r_const);
4979         tcg_temp_free_i32(r_const);
4980         dc->is_br = 1;
4981     }
4982     return;
4983 #endif
4984 }
4985
4986 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4987                                                   int spc, CPUSPARCState *env)
4988 {
4989     target_ulong pc_start, last_pc;
4990     uint16_t *gen_opc_end;
4991     DisasContext dc1, *dc = &dc1;
4992     CPUBreakpoint *bp;
4993     int j, lj = -1;
4994     int num_insns;
4995     int max_insns;
4996
4997     memset(dc, 0, sizeof(DisasContext));
4998     dc->tb = tb;
4999     pc_start = tb->pc;
5000     dc->pc = pc_start;
5001     last_pc = dc->pc;
5002     dc->npc = (target_ulong) tb->cs_base;
5003     dc->cc_op = CC_OP_DYNAMIC;
5004     dc->mem_idx = cpu_mmu_index(env);
5005     dc->def = env->def;
5006     if ((dc->def->features & CPU_FEATURE_FLOAT))
5007         dc->fpu_enabled = cpu_fpu_enabled(env);
5008     else
5009         dc->fpu_enabled = 0;
5010 #ifdef TARGET_SPARC64
5011     dc->address_mask_32bit = env->pstate & PS_AM;
5012 #endif
5013     gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
5014
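    /* per-TB temporaries shared by the instruction translators */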
5015     cpu_tmp0 = tcg_temp_new();
5016     cpu_tmp32 = tcg_temp_new_i32();
5017     cpu_tmp64 = tcg_temp_new_i64();
5018
5019     cpu_dst = tcg_temp_local_new();
5020
5021     // loads and stores
5022     cpu_val = tcg_temp_local_new();
5023     cpu_addr = tcg_temp_local_new();
5024
5025     num_insns = 0;
5026     max_insns = tb->cflags & CF_COUNT_MASK;
5027     if (max_insns == 0)
5028         max_insns = CF_COUNT_MASK;
5029     gen_icount_start();
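    /* translate one guest instruction per iteration, until a branch, a page
       boundary, single-stepping or the op buffer/icount limit stops us */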
5030     do {
5031         if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
5032             TAILQ_FOREACH(bp, &env->breakpoints, entry) {
5033                 if (bp->pc == dc->pc) {
5034                     if (dc->pc != pc_start)
5035                         save_state(dc, cpu_cond);
5036                     gen_helper_debug();
5037                     tcg_gen_exit_tb(0);
5038                     dc->is_br = 1;
5039                     goto exit_gen_loop;
5040                 }
5041             }
5042         }
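        /* in PC-search mode (spc), record the pc/npc of each generated op
           so the CPU state can be restored later */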
5043         if (spc) {
5044             qemu_log("Search PC...\n");
5045             j = gen_opc_ptr - gen_opc_buf;
5046             if (lj < j) {
5047                 lj++;
5048                 while (lj < j)
5049                     gen_opc_instr_start[lj++] = 0;
5050                 gen_opc_pc[lj] = dc->pc;
5051                 gen_opc_npc[lj] = dc->npc;
5052                 gen_opc_instr_start[lj] = 1;
5053                 gen_opc_icount[lj] = num_insns;
5054             }
5055         }
5056         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5057             gen_io_start();
5058         last_pc = dc->pc;
5059         disas_sparc_insn(dc);
5060         num_insns++;
5061
5062         if (dc->is_br)
5063             break;
5064         /* if the next PC is not sequential, stop translation now */
5065         if (dc->pc != (last_pc + 4))
5066             break;
5067         /* if we reach a page boundary, we stop generation so that the
5068            PC of a TT_TFAULT exception is always in the right page */
5069         if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5070             break;
5071         /* in single step mode, we generate only one instruction and
5072            then return to the main CPU loop */
5073         if (env->singlestep_enabled || singlestep) {
5074             tcg_gen_movi_tl(cpu_pc, dc->pc);
5075             tcg_gen_exit_tb(0);
5076             break;
5077         }
5078     } while ((gen_opc_ptr < gen_opc_end) &&
5079              (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5080              num_insns < max_insns);
5081
5082  exit_gen_loop:
5083     tcg_temp_free(cpu_addr);
5084     tcg_temp_free(cpu_val);
5085     tcg_temp_free(cpu_dst);
5086     tcg_temp_free_i64(cpu_tmp64);
5087     tcg_temp_free_i32(cpu_tmp32);
5088     tcg_temp_free(cpu_tmp0);
5089     if (tb->cflags & CF_LAST_IO)
5090         gen_io_end();
5091     if (!dc->is_br) {
5092         if (dc->pc != DYNAMIC_PC &&
5093             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5094             /* static PC and NPC: we can use direct chaining */
5095             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5096         } else {
5097             if (dc->pc != DYNAMIC_PC)
5098                 tcg_gen_movi_tl(cpu_pc, dc->pc);
5099             save_npc(dc, cpu_cond);
5100             tcg_gen_exit_tb(0);
5101         }
5102     }
5103     gen_icount_end(tb, num_insns);
5104     *gen_opc_ptr = INDEX_op_end;
5105     if (spc) {
5106         j = gen_opc_ptr - gen_opc_buf;
5107         lj++;
5108         while (lj <= j)
5109             gen_opc_instr_start[lj++] = 0;
5110 #if 0
5111         log_page_dump();
5112 #endif
5113         gen_opc_jump_pc[0] = dc->jump_pc[0];
5114         gen_opc_jump_pc[1] = dc->jump_pc[1];
5115     } else {
5116         tb->size = last_pc + 4 - pc_start;
5117         tb->icount = num_insns;
5118     }
5119 #ifdef DEBUG_DISAS
5120     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5121         qemu_log("--------------\n");
5122         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5123         log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
5124         qemu_log("\n");
5125     }
5126 #endif
5127 }
5128
5129 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5130 {
5131     gen_intermediate_code_internal(tb, 0, env);
5132 }
5133
5134 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5135 {
5136     gen_intermediate_code_internal(tb, 1, env);
5137 }
5138
5139 void gen_intermediate_code_init(CPUSPARCState *env)
5140 {
5141     unsigned int i;
5142     static int inited;
5143     static const char * const gregnames[8] = {
5144         NULL, // g0 not used
5145         "g1",
5146         "g2",
5147         "g3",
5148         "g4",
5149         "g5",
5150         "g6",
5151         "g7",
5152     };
5153     static const char * const fregnames[64] = {
5154         "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5155         "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5156         "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5157         "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5158         "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5159         "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5160         "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5161         "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5162     };
5163
5164     /* init various static tables */
5165     if (!inited) {
5166         inited = 1;
5167
5168         cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5169         cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5170                                              offsetof(CPUState, regwptr),
5171                                              "regwptr");
5172 #ifdef TARGET_SPARC64
5173         cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
5174                                          "xcc");
5175         cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5176                                          "asi");
5177         cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5178                                           "fprs");
5179         cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5180                                      "gsr");
5181         cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5182                                            offsetof(CPUState, tick_cmpr),
5183                                            "tick_cmpr");
5184         cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5185                                             offsetof(CPUState, stick_cmpr),
5186                                             "stick_cmpr");
5187         cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5188                                              offsetof(CPUState, hstick_cmpr),
5189                                              "hstick_cmpr");
5190         cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5191                                        "hintp");
5192         cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5193                                       "htba");
5194         cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5195                                       "hver");
5196         cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5197                                      offsetof(CPUState, ssr), "ssr");
5198         cpu_ver = tcg_global_mem_new(TCG_AREG0,
5199                                      offsetof(CPUState, version), "ver");
5200         cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5201                                              offsetof(CPUState, softint),
5202                                              "softint");
5203 #else
5204         cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
5205                                      "wim");
5206 #endif
5207         cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5208                                       "cond");
5209         cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5210                                         "cc_src");
5211         cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5212                                          offsetof(CPUState, cc_src2),
5213                                          "cc_src2");
5214         cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5215                                         "cc_dst");
5216         cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5217                                            "cc_op");
5218         cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5219                                          "psr");
5220         cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5221                                      "fsr");
5222         cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5223                                     "pc");
5224         cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5225                                      "npc");
5226         cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5227 #ifndef CONFIG_USER_ONLY
5228         cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5229                                      "tbr");
5230 #endif
5231         for (i = 1; i < 8; i++)
5232             cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5233                                               offsetof(CPUState, gregs[i]),
5234                                               gregnames[i]);
5235         for (i = 0; i < TARGET_FPREGS; i++)
5236             cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5237                                                 offsetof(CPUState, fpr[i]),
5238                                                 fregnames[i]);
5239
5240         /* register helpers */
5241
5242 #define GEN_HELPER 2
5243 #include "helper.h"
5244     }
5245 }
5246
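/* Restore env->pc and env->npc from the values recorded at translation time
   for the op at pc_pos; puc carries the branch condition used to pick
   between the two recorded jump targets when the NPC was JUMP_PC. */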
5247 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5248                 unsigned long searched_pc, int pc_pos, void *puc)
5249 {
5250     target_ulong npc;
5251     env->pc = gen_opc_pc[pc_pos];
5252     npc = gen_opc_npc[pc_pos];
5253     if (npc == DYNAMIC_PC) {
5254         /* dynamic NPC: already stored */
5255     } else if (npc == JUMP_PC) {
5256         target_ulong t2 = (target_ulong)(unsigned long)puc;
5257         /* jump PC: use T2 and the jump targets of the translation */
5258         if (t2)
5259             env->npc = gen_opc_jump_pc[0];
5260         else
5261             env->npc = gen_opc_jump_pc[1];
5262     } else {
5263         env->npc = npc;
5264     }
5265 }