/*
 * Alpha emulation CPU translation for QEMU.
 *
 * Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
#include <stdint.h>
#include <stdio.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "host-utils.h"
#include "tcg-op.h"
#include "qemu-common.h"
#define DO_SINGLE_STEP
#define ALPHA_DEBUG_DISAS

typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;
    int mem_idx;
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;
#endif
    uint32_t amask;
};
/* global register indexes */
static TCGv cpu_env;
static TCGv cpu_ir[31];
static TCGv cpu_pc;
/* dyngen register indexes */
static TCGv cpu_T[3];

/* register names */
static char cpu_reg_names[10*4+21*5];

#include "gen-icount.h"
static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");

#if TARGET_LONG_BITS > HOST_LONG_BITS
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                  offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                  offsetof(CPUState, t1), "T1");
    cpu_T[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                  offsetof(CPUState, t2), "T2");
#else
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG2, "T1");
    cpu_T[2] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG3, "T2");
#endif

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                       offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;
    }

    cpu_pc = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                offsetof(CPUState, pc), "pc");

    /* register helpers */
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"

    done_init = 1;
}
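
/* Helper registration works by re-reading helper.h with DEF_HELPER
   redefined as a registration call.  Illustrative expansion, assuming
   helper.h declares the AMASK helper used by translate_one below:

       DEF_HELPER(uint64_t, helper_amask, (uint64_t))
           ==> tcg_register_helper(helper_amask, "helper_amask");
*/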
static always_inline void gen_op_nop (void)
{
#if defined(GENERATE_NOP)
    gen_op_gen_nop();
#endif
}
#define GEN32(func, NAME) \
static GenOpFunc *NAME ## _table [32] = { \
NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
}; \
static always_inline void func (int n) \
{ \
    NAME ## _table[n](); \
}
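
/* Illustrative reading of the macro above: GEN32(gen_op_load_FT0_fir,
   gen_op_load_FT0_fir) builds a 32-entry table of the per-register ops
   gen_op_load_FT0_fir0 .. gen_op_load_FT0_fir31 plus a dispatcher that
   indexes the table with the floating register number.  */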
/* Special hacks for fir31 */
#define gen_op_load_FT0_fir31 gen_op_reset_FT0
#define gen_op_load_FT1_fir31 gen_op_reset_FT1
#define gen_op_load_FT2_fir31 gen_op_reset_FT2
#define gen_op_store_FT0_fir31 gen_op_nop
#define gen_op_store_FT1_fir31 gen_op_nop
#define gen_op_store_FT2_fir31 gen_op_nop
#define gen_op_cmov_fir31 gen_op_nop
GEN32(gen_op_load_FT0_fir, gen_op_load_FT0_fir);
GEN32(gen_op_load_FT1_fir, gen_op_load_FT1_fir);
GEN32(gen_op_load_FT2_fir, gen_op_load_FT2_fir);
GEN32(gen_op_store_FT0_fir, gen_op_store_FT0_fir);
GEN32(gen_op_store_FT1_fir, gen_op_store_FT1_fir);
GEN32(gen_op_store_FT2_fir, gen_op_store_FT2_fir);
GEN32(gen_op_cmov_fir, gen_op_cmov_fir);
static always_inline void gen_load_fir (DisasContext *ctx, int firn, int Tn)
{
    switch (Tn) {
    case 0:
        gen_op_load_FT0_fir(firn);
        break;
    case 1:
        gen_op_load_FT1_fir(firn);
        break;
    case 2:
        gen_op_load_FT2_fir(firn);
        break;
    }
}

static always_inline void gen_store_fir (DisasContext *ctx, int firn, int Tn)
{
    switch (Tn) {
    case 0:
        gen_op_store_FT0_fir(firn);
        break;
    case 1:
        gen_op_store_FT1_fir(firn);
        break;
    case 2:
        gen_op_store_FT2_fir(firn);
        break;
    }
}
#if defined(CONFIG_USER_ONLY)
#define OP_LD_TABLE(width) \
static GenOpFunc *gen_op_ld##width[] = { \
    &gen_op_ld##width##_raw, \
}
#define OP_ST_TABLE(width) \
static GenOpFunc *gen_op_st##width[] = { \
    &gen_op_st##width##_raw, \
}
#else
#define OP_LD_TABLE(width) \
static GenOpFunc *gen_op_ld##width[] = { \
    &gen_op_ld##width##_kernel, \
    &gen_op_ld##width##_executive, \
    &gen_op_ld##width##_supervisor, \
    &gen_op_ld##width##_user, \
}
#define OP_ST_TABLE(width) \
static GenOpFunc *gen_op_st##width[] = { \
    &gen_op_st##width##_kernel, \
    &gen_op_st##width##_executive, \
    &gen_op_st##width##_supervisor, \
    &gen_op_st##width##_user, \
}
#endif
#define GEN_LD(width) \
OP_LD_TABLE(width); \
static always_inline void gen_ld##width (DisasContext *ctx) \
{ \
    (*gen_op_ld##width[ctx->mem_idx])(); \
}

#define GEN_ST(width) \
OP_ST_TABLE(width); \
static always_inline void gen_st##width (DisasContext *ctx) \
{ \
    (*gen_op_st##width[ctx->mem_idx])(); \
}
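
/* One dispatch table is instantiated per access width.  The exact
   instantiation list did not survive extraction; the following is
   reconstructed from the call sites in translate_one below.  */
GEN_LD(bu);
GEN_ST(b);
GEN_LD(wu);
GEN_ST(w);
GEN_LD(l);
GEN_ST(l);
GEN_LD(q);
GEN_ST(q);
GEN_LD(q_u);
GEN_ST(q_u);
GEN_LD(l_l);
GEN_LD(q_l);
GEN_ST(l_c);
GEN_ST(q_c);
GEN_LD(f);
GEN_ST(f);
GEN_LD(g);
GEN_ST(g);
GEN_LD(s);
GEN_ST(s);
GEN_LD(t);
GEN_ST(t);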
#if 0 /* currently unused */
/* ... */
#endif

static always_inline void _gen_op_bcond (DisasContext *ctx)
{
#if 0 // Qemu does not know how to do this...
    gen_op_bcond(ctx->pc);
#else
    gen_op_bcond(ctx->pc >> 32, ctx->pc);
#endif
}
static always_inline void gen_excp (DisasContext *ctx,
                                    int exception, int error_code)
{
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    gen_op_excp(exception, error_code);
}

static always_inline void gen_invalid (DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}
static always_inline void gen_load_mem (DisasContext *ctx,
                                        void (*gen_load_op)(DisasContext *ctx),
                                        int ra, int rb, int32_t disp16,
                                        int clear)
{
    if (ra == 31 && disp16 == 0) {
        /* UNOP */
        gen_op_nop();
    } else {
        if (rb != 31)
            tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
        else
            tcg_gen_movi_i64(cpu_T[0], disp16);
        if (clear)
            tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
        (*gen_load_op)(ctx);
        if (ra != 31)
            tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
    }
}
static always_inline void gen_store_mem (DisasContext *ctx,
                                         void (*gen_store_op)(DisasContext *ctx),
                                         int ra, int rb, int32_t disp16,
                                         int clear)
{
    if (rb != 31)
        tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
    else
        tcg_gen_movi_i64(cpu_T[0], disp16);
    if (clear)
        tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
    if (ra != 31)
        tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
    else
        tcg_gen_movi_i64(cpu_T[1], 0);
    (*gen_store_op)(ctx);
}
static always_inline void gen_load_fmem (DisasContext *ctx,
                                         void (*gen_load_fop)(DisasContext *ctx),
                                         int ra, int rb, int32_t disp16)
{
    if (rb != 31)
        tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
    else
        tcg_gen_movi_i64(cpu_T[0], disp16);
    (*gen_load_fop)(ctx);
    gen_store_fir(ctx, ra, 1);
}

static always_inline void gen_store_fmem (DisasContext *ctx,
                                          void (*gen_store_fop)(DisasContext *ctx),
                                          int ra, int rb, int32_t disp16)
{
    if (rb != 31)
        tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
    else
        tcg_gen_movi_i64(cpu_T[0], disp16);
    gen_load_fir(ctx, ra, 1);
    (*gen_store_fop)(ctx);
}
static always_inline void gen_bcond (DisasContext *ctx,
                                     TCGCond cond,
                                     int ra, int32_t disp21, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + ((int64_t)disp21 << 2));
    gen_set_label(l2);
}
static always_inline void gen_fbcond (DisasContext *ctx,
                                      void (*gen_test_op)(void),
                                      int ra, int32_t disp21)
{
    tcg_gen_movi_i64(cpu_T[1], ctx->pc + ((int64_t)disp21 << 2));
    gen_load_fir(ctx, ra, 0);
    (*gen_test_op)();
    _gen_op_bcond(ctx);
}
static always_inline void gen_arith3 (DisasContext *ctx,
                                      void (*gen_arith_op)(void),
                                      int ra, int rb, int rc,
                                      int islit, uint8_t lit)
{
    if (ra != 31)
        tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
    else
        tcg_gen_movi_i64(cpu_T[0], 0);
    if (islit)
        tcg_gen_movi_i64(cpu_T[1], lit);
    else if (rb != 31)
        tcg_gen_mov_i64(cpu_T[1], cpu_ir[rb]);
    else
        tcg_gen_movi_i64(cpu_T[1], 0);
    (*gen_arith_op)();
    if (rc != 31)
        tcg_gen_mov_i64(cpu_ir[rc], cpu_T[0]);
}
static always_inline void gen_cmov (DisasContext *ctx,
                                    TCGCond inv_cond,
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit, int mask)
{
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else if (rb != 31)
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    gen_set_label(l1);
}
static always_inline void gen_farith2 (DisasContext *ctx,
                                       void (*gen_arith_fop)(void),
                                       int rb, int rc)
{
    gen_load_fir(ctx, rb, 0);
    (*gen_arith_fop)();
    gen_store_fir(ctx, rc, 0);
}

static always_inline void gen_farith3 (DisasContext *ctx,
                                       void (*gen_arith_fop)(void),
                                       int ra, int rb, int rc)
{
    gen_load_fir(ctx, ra, 0);
    gen_load_fir(ctx, rb, 1);
    (*gen_arith_fop)();
    gen_store_fir(ctx, rc, 0);
}

static always_inline void gen_fcmov (DisasContext *ctx,
                                     void (*gen_test_fop)(void),
                                     int ra, int rb, int rc)
{
    gen_load_fir(ctx, ra, 0);
    gen_load_fir(ctx, rb, 1);
    (*gen_test_fop)();
    gen_op_cmov_fir(rc);
}

static always_inline void gen_fti (DisasContext *ctx,
                                   void (*gen_move_fop)(void),
                                   int ra, int rc)
{
    gen_load_fir(ctx, rc, 0);
    (*gen_move_fop)();
    if (ra != 31)
        tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
}

static always_inline void gen_itf (DisasContext *ctx,
                                   void (*gen_move_fop)(void),
                                   int ra, int rc)
{
    if (ra != 31)
        tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
    else
        tcg_gen_movi_i64(cpu_T[0], 0);
    (*gen_move_fop)();
    gen_store_fir(ctx, rc, 0);
}
static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
{
    uint32_t palcode;
    int32_t disp21, disp16, disp12;
    uint16_t fn11, fn16;
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
    uint8_t lit;
    int ret;

    /* Decode all instruction fields */
    opc = insn >> 26;
    ra = (insn >> 21) & 0x1F;
    rb = (insn >> 16) & 0x1F;
    rc = insn & 0x1F;
    sbz = (insn >> 13) & 0x07;
    islit = (insn >> 12) & 1;
    lit = (insn >> 13) & 0xFF;
    palcode = insn & 0x03FFFFFF;
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
    disp16 = (int16_t)(insn & 0x0000FFFF);
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
    fn16 = insn & 0x0000FFFF;
    fn11 = (insn >> 5) & 0x000007FF;
    fpfn = fn11 & 0x3F;
    fn7 = (insn >> 5) & 0x0000007F;
    fn2 = (insn >> 5) & 0x00000003;
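
    /* Worked example of the sign-extension idiom above, assuming the
       branch-format word 0xE47FFFFF (a BEQ with an all-ones 21-bit
       displacement field):

           insn & 0x001FFFFF       = 0x001FFFFF
           << 11 (as int32_t)      = 0xFFFFF800
           >> 11 (arithmetic)      = 0xFFFFFFFF  (-1)

       i.e. bit 20 of the field is replicated into the upper bits; the
       code relies on the compiler using an arithmetic right shift for
       signed operands.  */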
#if defined ALPHA_DEBUG_DISAS
    if (logfile != NULL) {
        fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
                opc, ra, rb, rc, disp16);
    }
#endif
    ret = 0;
    switch (opc) {
        /* CALL_PAL */
        if (palcode >= 0x80 && palcode < 0xC0) {
            /* Unprivileged PAL call */
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
#if !defined (CONFIG_USER_ONLY)
        } else if (palcode < 0x40) {
            /* Privileged PAL code */
            if (ctx->mem_idx & 1)
                goto invalid_opc;
            else
                gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
#endif
        } else {
            /* Invalid PAL call */
            goto invalid_opc;
        }
        ret = 3;
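
        /* The (palcode & 0x1F) << 6 scaling in the dispatch above
           spaces the generated exception vectors 0x40 bytes apart,
           one slot per PAL call; this is a reading of the code here,
           not a statement about real PALcode layouts.  */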
        /* LDA */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
        }

        /* LDAH */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
        }

        /* LDBU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &gen_ldbu, ra, rb, disp16, 0);

        /* LDQ_U */
        gen_load_mem(ctx, &gen_ldq_u, ra, rb, disp16, 1);

        /* LDWU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &gen_ldwu, ra, rb, disp16, 0);

        /* STW */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_store_mem(ctx, &gen_stw, ra, rb, disp16, 0);

        /* STB */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_store_mem(ctx, &gen_stb, ra, rb, disp16, 0);

        /* STQ_U */
        gen_store_mem(ctx, &gen_stq_u, ra, rb, disp16, 1);
            /* ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit) {
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else if (rb != 31) {
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[ra]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], (int32_t)lit);
                    else if (rb != 31)
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                    else
                        tcg_gen_movi_i64(cpu_ir[rc], 0);
                }
            }
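
            /* Alpha keeps 32-bit results in canonical form: each *L
               operation computes the low 32 bits and then sign-extends
               them into the full 64-bit register, which is what the
               tcg_gen_ext32s_i64 calls above and below implement.  */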
            /* S4ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit || rb != 31) {
                        TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                        if (islit)
                            tcg_gen_addi_i64(tmp, tmp, lit);
                        else
                            tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                        tcg_temp_free(tmp);
                    } else {
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 2);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else if (rb != 31)
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                    else
                        tcg_gen_movi_i64(cpu_ir[rc], 0);
                }
            }

            /* SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit) {
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else if (rb != 31) {
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[ra]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else if (rb != 31) {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else
                        tcg_gen_movi_i64(cpu_ir[rc], 0);
                }
            }

            /* S4SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit || rb != 31) {
                        TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                        if (islit)
                            tcg_gen_subi_i64(tmp, tmp, lit);
                        else
                            tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                        tcg_temp_free(tmp);
                    } else {
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 2);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else if (rb != 31) {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else
                        tcg_gen_movi_i64(cpu_ir[rc], 0);
                }
            }

            /* CMPBGE */
            gen_arith3(ctx, &gen_op_cmpbge, ra, rb, rc, islit, lit);

            /* S8ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit || rb != 31) {
                        TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                        if (islit)
                            tcg_gen_addi_i64(tmp, tmp, lit);
                        else
                            tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                        tcg_temp_free(tmp);
                    } else {
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 3);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else if (rb != 31)
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                    else
                        tcg_gen_movi_i64(cpu_ir[rc], 0);
                }
            }

            /* S8SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit || rb != 31) {
                        TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                        if (islit)
                            tcg_gen_subi_i64(tmp, tmp, lit);
                        else
                            tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                        tcg_temp_free(tmp);
                    } else {
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 3);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else if (rb != 31) {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else
                        tcg_gen_movi_i64(cpu_ir[rc], 0);
                }
            }

            /* CMPULT */
            gen_arith3(ctx, &gen_op_cmpult, ra, rb, rc, islit, lit);
            /* ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else if (rb != 31)
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else if (rb != 31)
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                    else
                        tcg_gen_movi_i64(cpu_ir[rc], 0);
                }
            }

            /* S4ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit || rb != 31) {
                        TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                        if (islit)
                            tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                        else
                            tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                        tcg_temp_free(tmp);
                    } else
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 2);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else if (rb != 31)
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                    else
                        tcg_gen_movi_i64(cpu_ir[rc], 0);
                }
            }

            /* SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else if (rb != 31)
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else if (rb != 31)
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                    else
                        tcg_gen_movi_i64(cpu_ir[rc], 0);
                }
            }

            /* S4SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit || rb != 31) {
                        TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                        if (islit)
                            tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                        else
                            tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                        tcg_temp_free(tmp);
                    } else
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 2);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else if (rb != 31)
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                    else
                        tcg_gen_movi_i64(cpu_ir[rc], 0);
                }
            }

            /* CMPEQ */
            gen_arith3(ctx, &gen_op_cmpeq, ra, rb, rc, islit, lit);

            /* S8ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit || rb != 31) {
                        TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                        if (islit)
                            tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                        else
                            tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                        tcg_temp_free(tmp);
                    } else
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 3);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else if (rb != 31)
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                    else
                        tcg_gen_movi_i64(cpu_ir[rc], 0);
                }
            }

            /* S8SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit || rb != 31) {
                        TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                        if (islit)
                            tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                        else
                            tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                        tcg_temp_free(tmp);
                    } else
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 3);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else if (rb != 31)
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                    else
                        tcg_gen_movi_i64(cpu_ir[rc], 0);
                }
            }

            /* CMPULE */
            gen_arith3(ctx, &gen_op_cmpule, ra, rb, rc, islit, lit);

            /* ADDL/V */
            gen_arith3(ctx, &gen_op_addlv, ra, rb, rc, islit, lit);

            /* SUBL/V */
            gen_arith3(ctx, &gen_op_sublv, ra, rb, rc, islit, lit);

            /* CMPLT */
            gen_arith3(ctx, &gen_op_cmplt, ra, rb, rc, islit, lit);

            /* ADDQ/V */
            gen_arith3(ctx, &gen_op_addqv, ra, rb, rc, islit, lit);

            /* SUBQ/V */
            gen_arith3(ctx, &gen_op_subqv, ra, rb, rc, islit, lit);

            /* CMPLE */
            gen_arith3(ctx, &gen_op_cmple, ra, rb, rc, islit, lit);
            /* AND */
            if (likely(rc != 31)) {
                if (ra == 31 || (rb == 31 && !islit))
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }

            /* BIC */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else if (rb != 31) {
                        TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_not_i64(tmp, cpu_ir[rb]);
                        tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], tmp);
                        tcg_temp_free(tmp);
                    } else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }

            /* CMOVLBS */
            gen_cmov(ctx, TCG_COND_EQ, ra, rb, rc, islit, lit, 1);

            /* CMOVLBC */
            gen_cmov(ctx, TCG_COND_NE, ra, rb, rc, islit, lit, 1);

            /* BIS */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else if (rb != 31)
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else if (rb != 31)
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                    else
                        tcg_gen_movi_i64(cpu_ir[rc], 0);
                }
            }

            /* CMOVEQ */
            gen_cmov(ctx, TCG_COND_NE, ra, rb, rc, islit, lit, 0);

            /* CMOVNE */
            gen_cmov(ctx, TCG_COND_EQ, ra, rb, rc, islit, lit, 0);

            /* ORNOT */
            if (likely(rc != 31)) {
                if (rb == 31 && !islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ~0);
                else if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else {
                        TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_not_i64(tmp, cpu_ir[rb]);
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], tmp);
                        tcg_temp_free(tmp);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }

            /* XOR */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else if (rb != 31)
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else if (rb != 31)
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                    else
                        tcg_gen_movi_i64(cpu_ir[rc], 0);
                }
            }

            /* CMOVLT */
            gen_cmov(ctx, TCG_COND_GE, ra, rb, rc, islit, lit, 0);

            /* CMOVGE */
            gen_cmov(ctx, TCG_COND_LT, ra, rb, rc, islit, lit, 0);

            /* EQV */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else if (rb != 31) {
                        TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_not_i64(tmp, cpu_ir[rb]);
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], tmp);
                        tcg_temp_free(tmp);
                    } else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else if (rb != 31)
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                    else
                        tcg_gen_movi_i64(cpu_ir[rc], ~0);
                }
            }

            /* AMASK */
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
                else if (rb != 31)
                    tcg_gen_helper_1_1(helper_amask, cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }

            /* CMOVLE */
            gen_cmov(ctx, TCG_COND_GT, ra, rb, rc, islit, lit, 0);

            /* CMOVGT */
            gen_cmov(ctx, TCG_COND_LE, ra, rb, rc, islit, lit, 0);

            /* IMPLVER */
            gen_op_load_implver();
            if (rc != 31)
                tcg_gen_mov_i64(cpu_ir[rc], cpu_T[0]);
            gen_arith3(ctx, &gen_op_mskbl, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_extbl, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_insbl, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_mskwl, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_extwl, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_inswl, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_mskll, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_extll, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_insll, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_zap, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_zapnot, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_mskql, ra, rb, rc, islit, lit);

            /* SRL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else if (rb != 31) {
                        TCGv shift = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    } else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
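
            /* The andi above masks the shift count to six bits: Alpha
               shift instructions use only rb<5:0>, and masking first
               also keeps the generated TCG shift well-defined for
               counts that would otherwise reach 64 or more.  */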
            gen_arith3(ctx, &gen_op_extql, ra, rb, rc, islit, lit);

            /* SLL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else if (rb != 31) {
                        TCGv shift = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    } else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }

            gen_arith3(ctx, &gen_op_insql, ra, rb, rc, islit, lit);

            /* SRA */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else if (rb != 31) {
                        TCGv shift = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    } else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }

            gen_arith3(ctx, &gen_op_mskwh, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_inswh, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_extwh, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_msklh, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_inslh, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_extlh, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_mskqh, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_insqh, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_extqh, ra, rb, rc, islit, lit);
            /* MULL */
            if (likely(rc != 31)) {
                if (ra == 31 || (rb == 31 && !islit))
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else {
                    if (islit)
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }

            /* MULQ */
            if (likely(rc != 31)) {
                if (ra == 31 || (rb == 31 && !islit))
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }

            gen_arith3(ctx, &gen_op_umulh, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_mullv, ra, rb, rc, islit, lit);

            gen_arith3(ctx, &gen_op_mulqv, ra, rb, rc, islit, lit);
        switch (fpfn) { /* fn11 & 0x3F */
        /* ITOFS */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_itf(ctx, &gen_op_itofs, ra, rc);

        /* SQRTF */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_farith2(ctx, &gen_op_sqrtf, rb, rc);

        /* SQRTS */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_farith2(ctx, &gen_op_sqrts, rb, rc);

        /* ITOFF */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_itf(ctx, &gen_op_itoff, ra, rc);

        /* ITOFT */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_itf(ctx, &gen_op_itoft, ra, rc);

        /* SQRTG */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_farith2(ctx, &gen_op_sqrtg, rb, rc);

        /* SQRTT */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_farith2(ctx, &gen_op_sqrtt, rb, rc);
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        switch (fpfn) { /* fn11 & 0x3F */
        gen_farith3(ctx, &gen_op_addf, ra, rb, rc);

        gen_farith3(ctx, &gen_op_subf, ra, rb, rc);

        gen_farith3(ctx, &gen_op_mulf, ra, rb, rc);

        gen_farith3(ctx, &gen_op_divf, ra, rb, rc);

        gen_farith2(ctx, &gen_op_cvtdg, rb, rc);

        gen_farith3(ctx, &gen_op_addg, ra, rb, rc);

        gen_farith3(ctx, &gen_op_subg, ra, rb, rc);

        gen_farith3(ctx, &gen_op_mulg, ra, rb, rc);

        gen_farith3(ctx, &gen_op_divg, ra, rb, rc);

        gen_farith3(ctx, &gen_op_cmpgeq, ra, rb, rc);

        gen_farith3(ctx, &gen_op_cmpglt, ra, rb, rc);

        gen_farith3(ctx, &gen_op_cmpgle, ra, rb, rc);

        gen_farith2(ctx, &gen_op_cvtgf, rb, rc);

        gen_farith2(ctx, &gen_op_cvtgd, rb, rc);

        gen_farith2(ctx, &gen_op_cvtgq, rb, rc);

        gen_farith2(ctx, &gen_op_cvtqf, rb, rc);

        gen_farith2(ctx, &gen_op_cvtqg, rb, rc);
        /* IEEE floating-point */
        /* XXX: rounding mode and traps are ignored (!) */
        switch (fpfn) { /* fn11 & 0x3F */
        gen_farith3(ctx, &gen_op_adds, ra, rb, rc);

        gen_farith3(ctx, &gen_op_subs, ra, rb, rc);

        gen_farith3(ctx, &gen_op_muls, ra, rb, rc);

        gen_farith3(ctx, &gen_op_divs, ra, rb, rc);

        gen_farith3(ctx, &gen_op_addt, ra, rb, rc);

        gen_farith3(ctx, &gen_op_subt, ra, rb, rc);

        gen_farith3(ctx, &gen_op_mult, ra, rb, rc);

        gen_farith3(ctx, &gen_op_divt, ra, rb, rc);

        gen_farith3(ctx, &gen_op_cmptun, ra, rb, rc);

        gen_farith3(ctx, &gen_op_cmpteq, ra, rb, rc);

        gen_farith3(ctx, &gen_op_cmptlt, ra, rb, rc);

        gen_farith3(ctx, &gen_op_cmptle, ra, rb, rc);

        /* XXX: incorrect */
        if (fn11 == 0x2AC) {
            /* CVTST */
            gen_farith2(ctx, &gen_op_cvtst, rb, rc);
        } else {
            /* CVTTS */
            gen_farith2(ctx, &gen_op_cvtts, rb, rc);
        }

        gen_farith2(ctx, &gen_op_cvttq, rb, rc);

        gen_farith2(ctx, &gen_op_cvtqs, rb, rc);

        gen_farith2(ctx, &gen_op_cvtqt, rb, rc);
        /* CVTLQ */
        gen_farith2(ctx, &gen_op_cvtlq, rb, rc);

        /* CPYS */
        if (ra == 31 && rc == 31) {
            /* FNOP */
            gen_op_nop();
        } else if (ra == rb) {
            /* FMOV */
            gen_load_fir(ctx, rb, 0);
            gen_store_fir(ctx, rc, 0);
        } else
            gen_farith3(ctx, &gen_op_cpys, ra, rb, rc);

        /* CPYSN */
        gen_farith2(ctx, &gen_op_cpysn, rb, rc);

        /* CPYSE */
        gen_farith2(ctx, &gen_op_cpyse, rb, rc);

        /* MT_FPCR */
        gen_load_fir(ctx, ra, 0);
        gen_op_store_fpcr();

        /* MF_FPCR */
        gen_op_load_fpcr();
        gen_store_fir(ctx, ra, 0);

        /* FCMOVEQ */
        gen_fcmov(ctx, &gen_op_cmpfeq, ra, rb, rc);

        /* FCMOVNE */
        gen_fcmov(ctx, &gen_op_cmpfne, ra, rb, rc);

        /* FCMOVLT */
        gen_fcmov(ctx, &gen_op_cmpflt, ra, rb, rc);

        /* FCMOVGE */
        gen_fcmov(ctx, &gen_op_cmpfge, ra, rb, rc);

        /* FCMOVLE */
        gen_fcmov(ctx, &gen_op_cmpfle, ra, rb, rc);

        /* FCMOVGT */
        gen_fcmov(ctx, &gen_op_cmpfgt, ra, rb, rc);

        /* CVTQL */
        gen_farith2(ctx, &gen_op_cvtql, rb, rc);

        /* CVTQL/V */
        gen_farith2(ctx, &gen_op_cvtqlv, rb, rc);

        /* CVTQL/SV */
        gen_farith2(ctx, &gen_op_cvtqlsv, rb, rc);
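
        /* Opcode 0x18 (miscellaneous): the operation is selected by
           the function code carried in the displacement field, hence
           the switch on (uint16_t)disp16 below.  */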
        switch ((uint16_t)disp16) {
        /* TRAPB */
        /* No-op. Just exit from the current tb */

        /* EXCB */
        /* No-op. Just exit from the current tb */

        /* RPCC */
        if (ra != 31)
            tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);

        /* RC */
        if (ra != 31)
            tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);

        /* ECB */
        /* XXX: TODO: evict tb cache at address rb */

        /* RS */
        if (ra != 31)
            tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
        /* HW_MFPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        gen_op_mfpr(insn & 0xFF);
        if (ra != 31)
            tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
#endif

        /* JMP / JSR / RET / JSR_COROUTINE */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        if (rb != 31)
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
        else
            tcg_gen_movi_i64(cpu_pc, 0);
        /* These four jumps differ only in the branch prediction hint */
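
        /* The hint lives in fn2 (instruction bits 15:14); a minimal
           sketch of the dispatch that follows, assuming the usual
           Alpha encoding:
               0x0 JMP, 0x1 JSR, 0x2 RET, 0x3 JSR_COROUTINE
           All four end up as the same indirect jump here.  */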
        /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb != 31)
            tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
        else
            tcg_gen_movi_i64(cpu_T[0], 0);
        tcg_gen_movi_i64(cpu_T[1], disp12);
        tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
        switch ((insn >> 12) & 0xF) {
        /* Longword physical access */

        /* Quadword physical access */

        /* Longword physical access with lock */

        /* Quadword physical access with lock */

        /* Longword virtual PTE fetch */
        gen_op_ldl_kernel();

        /* Quadword virtual PTE fetch */
        gen_op_ldq_kernel();

        /* Longword virtual access */
        gen_op_ld_phys_to_virt();

        /* Quadword virtual access */
        gen_op_ld_phys_to_virt();

        /* Longword virtual access with protection check */

        /* Quadword virtual access with protection check */

        /* Longword virtual access with alternate access mode */
        gen_op_set_alt_mode();
        gen_op_ld_phys_to_virt();
        gen_op_restore_mode();

        /* Quadword virtual access with alternate access mode */
        gen_op_set_alt_mode();
        gen_op_ld_phys_to_virt();
        gen_op_restore_mode();

        /* Longword virtual access with alternate access mode and
           protection checks */
        gen_op_set_alt_mode();
        gen_op_restore_mode();

        /* Quadword virtual access with alternate access mode and
           protection checks */
        gen_op_set_alt_mode();
        gen_op_restore_mode();

        if (ra != 31)
            tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
#endif
            /* SEXTB */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
                else if (rb != 31)
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }

            /* SEXTW */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
                else if (rb != 31)
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }

            /* CTPOP */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
                else if (rb != 31)
                    tcg_gen_helper_1_1(helper_ctpop, cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }

            /* PERR */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;

            /* CTLZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
                else if (rb != 31)
                    tcg_gen_helper_1_1(helper_ctlz, cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }

            /* CTTZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
                else if (rb != 31)
                    tcg_gen_helper_1_1(helper_cttz, cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }

            /* The remaining MVI ops (UNPKBW, UNPKWL, PKWB, PKLB,
               MINSB8, MINSW4, MINUB8, MINUW4, MAXUB8, MAXUW4, MAXSB8,
               MAXSW4) are unimplemented; each checks the amask and
               then faults (XXX: TODO).  */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;

            /* FTOIT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fti(ctx, &gen_op_ftoit, ra, rb);

            /* FTOIS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_fti(ctx, &gen_op_ftois, ra, rb);
        /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31)
            tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
        else
            tcg_gen_movi_i64(cpu_T[0], 0);
        gen_op_mtpr(insn & 0xFF);
        ret = 2;
#endif

        /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb != 31)
            tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
        else
            tcg_gen_movi_i64(cpu_T[0], 0);
        tcg_gen_movi_i64(cpu_T[1], (((int64_t)insn << 51) >> 51));
        tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
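
        /* ((int64_t)insn << 51) >> 51 sign-extends the low 13 bits of
           the instruction word: shifting left and then arithmetic-
           shifting back replicates bit 12 across the upper bits, so
           insn<12:0> = 0x1FFF yields -1.  */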
        /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb != 31)
            tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp12);
        else
            tcg_gen_movi_i64(cpu_T[0], disp12);
        if (ra != 31)
            tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
        else
            tcg_gen_movi_i64(cpu_T[1], 0);
        switch ((insn >> 12) & 0xF) {
        /* Longword physical access */

        /* Quadword physical access */

        /* Longword physical access with lock */

        /* Quadword physical access with lock */

        /* Longword virtual access */
        gen_op_st_phys_to_virt();

        /* Quadword virtual access */
        gen_op_st_phys_to_virt();

        /* Longword virtual access with alternate access mode */
        gen_op_set_alt_mode();
        gen_op_st_phys_to_virt();
        gen_op_restore_mode();

        /* Quadword virtual access with alternate access mode */
        gen_op_set_alt_mode();
        gen_op_st_phys_to_virt();
        gen_op_restore_mode();
#endif
        gen_load_fmem(ctx, &gen_ldf, ra, rb, disp16);

        gen_load_fmem(ctx, &gen_ldg, ra, rb, disp16);

        gen_load_fmem(ctx, &gen_lds, ra, rb, disp16);

        gen_load_fmem(ctx, &gen_ldt, ra, rb, disp16);

        gen_store_fmem(ctx, &gen_stf, ra, rb, disp16);

        gen_store_fmem(ctx, &gen_stg, ra, rb, disp16);

        gen_store_fmem(ctx, &gen_sts, ra, rb, disp16);

        gen_store_fmem(ctx, &gen_stt, ra, rb, disp16);

        gen_load_mem(ctx, &gen_ldl, ra, rb, disp16, 0);

        gen_load_mem(ctx, &gen_ldq, ra, rb, disp16, 0);

        gen_load_mem(ctx, &gen_ldl_l, ra, rb, disp16, 0);

        gen_load_mem(ctx, &gen_ldq_l, ra, rb, disp16, 0);

        gen_store_mem(ctx, &gen_stl, ra, rb, disp16, 0);

        gen_store_mem(ctx, &gen_stq, ra, rb, disp16, 0);

        gen_store_mem(ctx, &gen_stl_c, ra, rb, disp16, 0);

        gen_store_mem(ctx, &gen_stq_c, ra, rb, disp16, 0);
        /* BR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + ((int64_t)disp21 << 2));
        ret = 1;
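
        /* Branch displacements are longword counts: disp21 is scaled
           by 4 and added to the updated PC (ctx->pc already points at
           the next instruction here), per the Alpha branch format.  */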
        /* FBEQ */
        gen_fbcond(ctx, &gen_op_cmpfeq, ra, disp21);

        /* FBLT */
        gen_fbcond(ctx, &gen_op_cmpflt, ra, disp21);

        /* FBLE */
        gen_fbcond(ctx, &gen_op_cmpfle, ra, disp21);

        /* BSR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + ((int64_t)disp21 << 2));
        ret = 1;

        /* FBNE */
        gen_fbcond(ctx, &gen_op_cmpfne, ra, disp21);

        /* FBGE */
        gen_fbcond(ctx, &gen_op_cmpfge, ra, disp21);

        /* FBGT */
        gen_fbcond(ctx, &gen_op_cmpfgt, ra, disp21);

        /* BLBC */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);

        /* BEQ */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);

        /* BLT */
        gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);

        /* BLE */
        gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);

        /* BLBS */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);

        /* BNE */
        gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);

        /* BGE */
        gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);

        /* BGT */
        gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);

    invalid_opc:
        gen_invalid(ctx);
        ret = 3;
    }

    return ret;
}
static always_inline void gen_intermediate_code_internal (CPUState *env,
                                                          TranslationBlock *tb,
                                                          int search_pc)
{
#if defined ALPHA_DEBUG_DISAS
    static int insn_count;
#endif
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        if (logfile != NULL) {
            fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
                    ctx.pc, ctx.mem_idx);
        }
#endif
        insn = ldl_code(ctx.pc);
#if defined ALPHA_DEBUG_DISAS
        if (logfile != NULL) {
            fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
        }
#endif
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
            (env->singlestep_enabled) ||
            num_insns >= max_insns) {
            break;
        }
#if defined (DO_SINGLE_STEP)
        break;
#endif
    }
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
#if defined (DO_TB_FLUSH)
    tcg_gen_helper_0_0(helper_tb_flush);
#endif
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#if defined ALPHA_DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, 0);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
        fprintf(logfile, "\n");
    }
#endif
}
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    if (!env)
        return NULL;
    cpu_exec_init(env);
    alpha_translate_init();
    tlb_flush(env, 1);
    /* XXX: should not be hardcoded */
    env->implver = IMPLVER_2106x;
    env->ps = 0x1F00;
#if defined (CONFIG_USER_ONLY)
    env->ps |= 1 << 3;
#endif
    pal_init(env);
    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    // env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    // env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    // env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    // env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    // env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

    return env;
}
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}