2 * Alpha emulation cpu translation for qemu.
4 * Copyright (c) 2007 Jocelyn Mayer
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
28 #include "host-utils.h"
31 #include "qemu-common.h"
/* Build-time switches and translator-global state.
 * DO_SINGLE_STEP forces the translation loop to stop after each insn;
 * ALPHA_DEBUG_DISAS enables the disassembly logging below.
 * NOTE(review): this excerpt is missing interior lines; the declarations
 * below are fragmentary (e.g. cpu_env/cpu_pc/cpu_T are declared in the
 * missing lines).
 */
33 #define DO_SINGLE_STEP
35 #define ALPHA_DEBUG_DISAS
38 typedef struct DisasContext DisasContext;
42 #if !defined (CONFIG_USER_ONLY)
48 /* global register indexes */
50 static TCGv cpu_ir[31];
53 /* dyngen register indexes */
57 static char cpu_reg_names[5*31];
59 #include "gen-icount.h"
/* One-time setup of the TCG globals used by the Alpha front end:
 * the env pointer, the T0-T2 scratch values, the 31 integer registers
 * ir0..ir30 (ir31 always reads as zero and gets no TCG global) and pc.
 * Guarded by done_init so repeated calls are no-ops (guard check is in
 * the missing lines of this excerpt).
 */
61 static void alpha_translate_init(void)
65 static int done_init = 0;
70 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
/* Guest words wider than host words: keep T0-T2 in CPUState memory. */
72 #if TARGET_LONG_BITS > HOST_LONG_BITS
73 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
74 offsetof(CPUState, t0), "T0");
75 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
76 offsetof(CPUState, t1), "T1");
77 cpu_T[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
78 offsetof(CPUState, t2), "T2");
/* Presumably the #else branch (line missing in excerpt): map T0-T2
 * onto fixed host registers AREG1-AREG3. */
80 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG1, "T0");
81 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG2, "T1");
82 cpu_T[2] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG3, "T2");
/* Create ir0..ir30 globals backed by CPUState.ir[]; names are built in
 * cpu_reg_names (pointer p advances in missing lines). */
86 for (i = 0; i < 31; i++) {
87 sprintf(p, "ir%d", i);
88 cpu_ir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
89 offsetof(CPUState, ir[i]), p);
93 cpu_pc = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
94 offsetof(CPUState, pc), "pc");
96 /* register helpers */
98 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
/* Do-nothing op: emits a real nop only when GENERATE_NOP is defined,
 * otherwise compiles away.  Used as the target of the ir31/fir31
 * "zero register" macro aliases below. */
104 static always_inline void gen_op_nop (void)
106 #if defined(GENERATE_NOP)
/* GEN32(func, NAME): build a 32-entry dispatch table NAME##0..NAME##31
 * (one generated op per architectural register) plus a wrapper func(n)
 * that calls the n-th entry.  NOTE(review): no comments are inserted
 * inside the macro body because lines of the backslash-continuation are
 * missing from this excerpt. */
111 #define GEN32(func, NAME) \
112 static GenOpFunc *NAME ## _table [32] = { \
113 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
114 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
115 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
116 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
117 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
118 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
119 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
120 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
122 static always_inline void func (int n) \
124 NAME ## _table[n](); \
/* ir31 always reads as zero on Alpha, so a conditional move into it is
 * a no-op; alias entry 31 to gen_op_nop before instantiating the table. */
128 /* Special hacks for ir31 */
129 #define gen_op_cmov_ir31 gen_op_nop
130 GEN32(gen_op_cmov_ir, gen_op_cmov_ir);
/* fir31 is the floating-point zero register: loads from it are remapped
 * to the FT reset ops (produce +0.0), stores and cmov into it become
 * no-ops.  The GEN32 instantiations then build the per-register
 * load/store/cmov dispatch tables for FT0-FT2. */
133 /* Special hacks for fir31 */
134 #define gen_op_load_FT0_fir31 gen_op_reset_FT0
135 #define gen_op_load_FT1_fir31 gen_op_reset_FT1
136 #define gen_op_load_FT2_fir31 gen_op_reset_FT2
137 #define gen_op_store_FT0_fir31 gen_op_nop
138 #define gen_op_store_FT1_fir31 gen_op_nop
139 #define gen_op_store_FT2_fir31 gen_op_nop
140 #define gen_op_cmov_fir31 gen_op_nop
141 GEN32(gen_op_load_FT0_fir, gen_op_load_FT0_fir);
142 GEN32(gen_op_load_FT1_fir, gen_op_load_FT1_fir);
143 GEN32(gen_op_load_FT2_fir, gen_op_load_FT2_fir);
144 GEN32(gen_op_store_FT0_fir, gen_op_store_FT0_fir);
145 GEN32(gen_op_store_FT1_fir, gen_op_store_FT1_fir);
146 GEN32(gen_op_store_FT2_fir, gen_op_store_FT2_fir);
147 GEN32(gen_op_cmov_fir, gen_op_cmov_fir);
/* Load FP register firn into scratch FT0/FT1/FT2 selected by Tn.
 * (Switch scaffolding for Tn is in lines missing from this excerpt.) */
149 static always_inline void gen_load_fir (DisasContext *ctx, int firn, int Tn)
153 gen_op_load_FT0_fir(firn);
156 gen_op_load_FT1_fir(firn);
159 gen_op_load_FT2_fir(firn);
/* Store scratch FT0/FT1/FT2 (selected by Tn) back into FP register firn. */
164 static always_inline void gen_store_fir (DisasContext *ctx, int firn, int Tn)
168 gen_op_store_FT0_fir(firn);
171 gen_op_store_FT1_fir(firn);
174 gen_op_store_FT2_fir(firn);
/* Per-access-mode load/store op tables, indexed by ctx->mem_idx.
 * User-mode emulation has a single _raw accessor; system emulation has
 * one accessor per Alpha privilege mode (kernel/executive/supervisor/
 * user).  NOTE(review): the #else and closing lines of these macros are
 * missing from this excerpt, so no comments are inserted inside the
 * backslash-continuations. */
180 #if defined(CONFIG_USER_ONLY)
181 #define OP_LD_TABLE(width) \
182 static GenOpFunc *gen_op_ld##width[] = { \
183 &gen_op_ld##width##_raw, \
185 #define OP_ST_TABLE(width) \
186 static GenOpFunc *gen_op_st##width[] = { \
187 &gen_op_st##width##_raw, \
190 #define OP_LD_TABLE(width) \
191 static GenOpFunc *gen_op_ld##width[] = { \
192 &gen_op_ld##width##_kernel, \
193 &gen_op_ld##width##_executive, \
194 &gen_op_ld##width##_supervisor, \
195 &gen_op_ld##width##_user, \
197 #define OP_ST_TABLE(width) \
198 static GenOpFunc *gen_op_st##width[] = { \
199 &gen_op_st##width##_kernel, \
200 &gen_op_st##width##_executive, \
201 &gen_op_st##width##_supervisor, \
202 &gen_op_st##width##_user, \
/* GEN_LD/GEN_ST(width): instantiate the mode table for `width` and a
 * gen_ld/st##width helper that dispatches through it on ctx->mem_idx. */
206 #define GEN_LD(width) \
207 OP_LD_TABLE(width); \
208 static always_inline void gen_ld##width (DisasContext *ctx) \
210 (*gen_op_ld##width[ctx->mem_idx])(); \
213 #define GEN_ST(width) \
214 OP_ST_TABLE(width); \
215 static always_inline void gen_st##width (DisasContext *ctx) \
217 (*gen_op_st##width[ctx->mem_idx])(); \
/* Dead code: compiled out by the surrounding #if 0.  Would emit a
 * conditional-branch op to ctx->pc; the two variants presumably cover
 * 32-bit vs 64-bit host immediates (inner #else is missing here). */
235 #if 0 /* currently unused */
246 static always_inline void _gen_op_bcond (DisasContext *ctx)
248 #if 0 // Qemu does not know how to do this...
249 gen_op_bcond(ctx->pc);
251 gen_op_bcond(ctx->pc >> 32, ctx->pc);
/* Raise a guest exception: commit the current pc to the TCG pc global,
 * then emit the exception op with the given exception number and error
 * code.  (The return/cleanup lines are missing from this excerpt.) */
255 static always_inline void gen_excp (DisasContext *ctx,
256 int exception, int error_code)
258 tcg_gen_movi_i64(cpu_pc, ctx->pc);
259 gen_op_excp(exception, error_code);
/* Illegal/reserved opcode: raise the OPCDEC exception. */
262 static always_inline void gen_invalid (DisasContext *ctx)
264 gen_excp(ctx, EXCP_OPCDEC, 0);
/* Integer load: compute the effective address rb+disp16 into T0 (rb==31
 * presumably yields just disp16 -- the if/else lines are missing here),
 * optionally clear the low 3 bits (for LDQ_U-style unaligned access; the
 * controlling parameter is on a missing line after disp16), invoke the
 * width-specific load op, then write the result T1 into ra.  A load into
 * ra==31 with zero displacement is treated as a no-op/prefetch. */
267 static always_inline void gen_load_mem (DisasContext *ctx,
268 void (*gen_load_op)(DisasContext *ctx),
269 int ra, int rb, int32_t disp16,
272 if (ra == 31 && disp16 == 0) {
277 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
279 tcg_gen_movi_i64(cpu_T[0], disp16);
281 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
284 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
/* Integer store: effective address rb+disp16 into T0 (just disp16 when
 * rb==31 -- branch lines missing in excerpt), optional ~0x7 alignment
 * mask for STQ_U-style ops, value from ra (or 0 for ra==31) into T1,
 * then the width-specific store op. */
288 static always_inline void gen_store_mem (DisasContext *ctx,
289 void (*gen_store_op)(DisasContext *ctx),
290 int ra, int rb, int32_t disp16,
294 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
296 tcg_gen_movi_i64(cpu_T[0], disp16);
298 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
300 tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
302 tcg_gen_movi_i64(cpu_T[1], 0);
303 (*gen_store_op)(ctx);
/* FP load: EA = rb + disp16 (disp16 alone when rb==31; branch lines
 * missing), run the width-specific FP load op into FT1, then store FT1
 * into fir[ra]. */
306 static always_inline void gen_load_fmem (DisasContext *ctx,
307 void (*gen_load_fop)(DisasContext *ctx),
308 int ra, int rb, int32_t disp16)
311 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
313 tcg_gen_movi_i64(cpu_T[0], disp16);
314 (*gen_load_fop)(ctx);
315 gen_store_fir(ctx, ra, 1);
/* FP store: EA = rb + disp16 into T0, load fir[ra] into FT1, then run
 * the width-specific FP store op. */
318 static always_inline void gen_store_fmem (DisasContext *ctx,
319 void (*gen_store_fop)(DisasContext *ctx),
320 int ra, int rb, int32_t disp16)
323 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
325 tcg_gen_movi_i64(cpu_T[0], disp16);
326 gen_load_fir(ctx, ra, 1);
327 (*gen_store_fop)(ctx);
/* Integer conditional branch: branch target pc + 4*disp16 into T1,
 * tested value from ra (0 for ra==31) into T0; the condition test and
 * actual branch emission are on lines missing from this excerpt. */
330 static always_inline void gen_bcond (DisasContext *ctx,
331 void (*gen_test_op)(void),
332 int ra, int32_t disp16)
334 tcg_gen_movi_i64(cpu_T[1], ctx->pc + (int64_t)(disp16 << 2));
336 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
338 tcg_gen_movi_i64(cpu_T[0], 0);
/* FP conditional branch: target pc + 4*disp16 into T1, fir[ra] into FT0;
 * condition test and branch emission are on missing lines. */
343 static always_inline void gen_fbcond (DisasContext *ctx,
344 void (*gen_test_op)(void),
345 int ra, int32_t disp16)
347 tcg_gen_movi_i64(cpu_T[1], ctx->pc + (int64_t)(disp16 << 2));
348 gen_load_fir(ctx, ra, 0);
/* Three-operand integer arithmetic: T0 <- ra (0 if ra==31);
 * T1 <- 8-bit zero-extended literal when islit, else rb (0 if rb==31);
 * apply the op (missing line), write T0 to rc unless rc==31. */
353 static always_inline void gen_arith3 (DisasContext *ctx,
354 void (*gen_arith_op)(void),
355 int ra, int rb, int rc,
356 int islit, uint8_t lit)
359 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
361 tcg_gen_movi_i64(cpu_T[0], 0);
363 tcg_gen_movi_i64(cpu_T[1], lit);
365 tcg_gen_mov_i64(cpu_T[1], cpu_ir[rb]);
367 tcg_gen_movi_i64(cpu_T[1], 0);
370 tcg_gen_mov_i64(cpu_ir[rc], cpu_T[0]);
/* Integer conditional move: same operand setup as gen_arith3 (T0 from
 * ra, T1 from literal/rb), then the test op and the per-register cmov
 * into rc (on lines missing from this excerpt). */
373 static always_inline void gen_cmov (DisasContext *ctx,
374 void (*gen_test_op)(void),
375 int ra, int rb, int rc,
376 int islit, uint8_t lit)
379 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
381 tcg_gen_movi_i64(cpu_T[0], 0);
383 tcg_gen_movi_i64(cpu_T[1], lit);
385 tcg_gen_mov_i64(cpu_T[1], cpu_ir[rb]);
387 tcg_gen_movi_i64(cpu_T[1], 0);
/* Two-operand FP op: FT0 <- fir[rb], apply op (missing line),
 * fir[rc] <- FT0.  (rb/rc parameter line is missing in this excerpt.) */
392 static always_inline void gen_farith2 (DisasContext *ctx,
393 void (*gen_arith_fop)(void),
396 gen_load_fir(ctx, rb, 0);
398 gen_store_fir(ctx, rc, 0);
/* Three-operand FP op: FT0 <- fir[ra], FT1 <- fir[rb], apply op
 * (missing line), fir[rc] <- FT0. */
401 static always_inline void gen_farith3 (DisasContext *ctx,
402 void (*gen_arith_fop)(void),
403 int ra, int rb, int rc)
405 gen_load_fir(ctx, ra, 0);
406 gen_load_fir(ctx, rb, 1);
408 gen_store_fir(ctx, rc, 0);
/* FP conditional move: FT0 <- fir[ra], FT1 <- fir[rb]; the test op and
 * the cmov into fir[rc] are on lines missing from this excerpt. */
411 static always_inline void gen_fcmov (DisasContext *ctx,
412 void (*gen_test_fop)(void),
413 int ra, int rb, int rc)
415 gen_load_fir(ctx, ra, 0);
416 gen_load_fir(ctx, rb, 1);
/* FP-to-integer move (FTOIT/FTOIS family): FT0 <- fir[rc], run the move
 * op (missing line), write T0 to integer register ra.
 * NOTE(review): operand roles (ra vs rc) inferred from the visible
 * lines only -- confirm against the full source. */
421 static always_inline void gen_fti (DisasContext *ctx,
422 void (*gen_move_fop)(void),
425 gen_load_fir(ctx, rc, 0);
428 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
/* Integer-to-FP move (ITOFS/ITOFF/ITOFT family): T0 <- ra (0 when
 * ra==31), run the move op (missing line), fir[rc] <- FT0. */
431 static always_inline void gen_itf (DisasContext *ctx,
432 void (*gen_move_fop)(void),
436 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
438 tcg_gen_movi_i64(cpu_T[0], 0);
440 gen_store_fir(ctx, rc, 0);
/* S4ADDL: scale T0 by 4 then longword-add (add op on a missing line). */
443 static always_inline void gen_s4addl (void)
445 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 2);
/* S4SUBL: scale T0 by 4 then longword-subtract (sub op on a missing line). */
449 static always_inline void gen_s4subl (void)
451 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 2);
/* S8ADDL: scale T0 by 8 then longword-add (add op on a missing line). */
455 static always_inline void gen_s8addl (void)
457 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 3);
/* S8SUBL: scale T0 by 8 then longword-subtract (sub op on a missing line). */
461 static always_inline void gen_s8subl (void)
463 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 3);
/* S4ADDQ: T0 = (T0 << 2) + T1, full 64-bit quadword add. */
467 static always_inline void gen_s4addq (void)
469 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 2);
470 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
/* S4SUBQ: T0 = (T0 << 2) - T1, full 64-bit quadword subtract. */
473 static always_inline void gen_s4subq (void)
475 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 2);
476 tcg_gen_sub_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
/* S8ADDQ: T0 = (T0 << 3) + T1, full 64-bit quadword add. */
479 static always_inline void gen_s8addq (void)
481 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 3);
482 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
/* S8SUBQ: T0 = (T0 << 3) - T1, full 64-bit quadword subtract. */
485 static always_inline void gen_s8subq (void)
487 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 3);
488 tcg_gen_sub_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
/* Decode and translate a single Alpha instruction.  Extracts every
 * field format up front (memory, branch, operate, FP, PALcode), then
 * dispatches on the major opcode.  The return value drives the outer
 * translation loop (nonzero ends the TB; exact meanings of 1/3 are in
 * missing lines).  NOTE(review): this excerpt omits most of the switch
 * scaffolding (case labels, breaks, braces); comments below annotate
 * only what the visible lines establish. */
491 static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
494 int32_t disp21, disp16, disp12;
496 uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
500 /* Decode all instruction fields */
502 ra = (insn >> 21) & 0x1F;
503 rb = (insn >> 16) & 0x1F;
505 sbz = (insn >> 13) & 0x07;
506 islit = (insn >> 12) & 1;
507 lit = (insn >> 13) & 0xFF;
508 palcode = insn & 0x03FFFFFF;
/* disp21/disp12: shift-left-then-arithmetic-shift-right sign extension. */
509 disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
510 disp16 = (int16_t)(insn & 0x0000FFFF);
511 disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
512 fn16 = insn & 0x0000FFFF;
513 fn11 = (insn >> 5) & 0x000007FF;
515 fn7 = (insn >> 5) & 0x0000007F;
516 fn2 = (insn >> 5) & 0x00000003;
518 #if defined ALPHA_DEBUG_DISAS
519 if (logfile != NULL) {
520 fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
521 opc, ra, rb, rc, disp16);
/* CALL_PAL dispatch: 0x80..0xBF are unprivileged, <0x40 privileged
 * (system emulation only, with a PAL-mode check on ctx->mem_idx). */
527 if (palcode >= 0x80 && palcode < 0xC0) {
528 /* Unprivileged PAL call */
529 gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
530 #if !defined (CONFIG_USER_ONLY)
531 } else if (palcode < 0x40) {
532 /* Privileged PAL code */
533 if (ctx->mem_idx & 1)
536 gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
539 /* Invalid PAL call */
/* LDA / LDAH: address arithmetic with no memory access. */
569 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
571 tcg_gen_movi_i64(cpu_ir[ra], disp16);
578 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
580 tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
/* Byte/word loads and stores; gated on the BWX architecture extension. */
585 if (!(ctx->amask & AMASK_BWX))
587 gen_load_mem(ctx, &gen_ldbu, ra, rb, disp16, 0);
591 gen_load_mem(ctx, &gen_ldq_u, ra, rb, disp16, 1);
595 if (!(ctx->amask & AMASK_BWX))
597 gen_load_mem(ctx, &gen_ldwu, ra, rb, disp16, 0);
601 if (!(ctx->amask & AMASK_BWX))
603 gen_store_mem(ctx, &gen_stw, ra, rb, disp16, 0);
607 if (!(ctx->amask & AMASK_BWX))
609 gen_store_mem(ctx, &gen_stb, ra, rb, disp16, 0);
613 gen_store_mem(ctx, &gen_stq_u, ra, rb, disp16, 1);
/* Opcode 0x10: integer arithmetic, dispatched on fn7. */
619 gen_arith3(ctx, &gen_op_addl, ra, rb, rc, islit, lit);
623 gen_arith3(ctx, &gen_s4addl, ra, rb, rc, islit, lit);
627 gen_arith3(ctx, &gen_op_subl, ra, rb, rc, islit, lit);
631 gen_arith3(ctx, &gen_s4subl, ra, rb, rc, islit, lit);
635 gen_arith3(ctx, &gen_op_cmpbge, ra, rb, rc, islit, lit);
639 gen_arith3(ctx, &gen_s8addl, ra, rb, rc, islit, lit);
643 gen_arith3(ctx, &gen_s8subl, ra, rb, rc, islit, lit);
647 gen_arith3(ctx, &gen_op_cmpult, ra, rb, rc, islit, lit);
651 gen_arith3(ctx, &gen_op_addq, ra, rb, rc, islit, lit);
655 gen_arith3(ctx, &gen_s4addq, ra, rb, rc, islit, lit);
659 gen_arith3(ctx, &gen_op_subq, ra, rb, rc, islit, lit);
663 gen_arith3(ctx, &gen_s4subq, ra, rb, rc, islit, lit);
667 gen_arith3(ctx, &gen_op_cmpeq, ra, rb, rc, islit, lit);
671 gen_arith3(ctx, &gen_s8addq, ra, rb, rc, islit, lit);
675 gen_arith3(ctx, &gen_s8subq, ra, rb, rc, islit, lit);
679 gen_arith3(ctx, &gen_op_cmpule, ra, rb, rc, islit, lit);
683 gen_arith3(ctx, &gen_op_addlv, ra, rb, rc, islit, lit);
687 gen_arith3(ctx, &gen_op_sublv, ra, rb, rc, islit, lit);
691 gen_arith3(ctx, &gen_op_cmplt, ra, rb, rc, islit, lit);
695 gen_arith3(ctx, &gen_op_addqv, ra, rb, rc, islit, lit);
699 gen_arith3(ctx, &gen_op_subqv, ra, rb, rc, islit, lit);
703 gen_arith3(ctx, &gen_op_cmple, ra, rb, rc, islit, lit);
/* Opcode 0x11: logical ops and integer cmov. */
713 gen_arith3(ctx, &gen_op_and, ra, rb, rc, islit, lit);
717 gen_arith3(ctx, &gen_op_bic, ra, rb, rc, islit, lit);
721 gen_cmov(ctx, &gen_op_cmplbs, ra, rb, rc, islit, lit);
725 gen_cmov(ctx, &gen_op_cmplbc, ra, rb, rc, islit, lit);
/* BIS fast paths: ra==rb (or one is the zero reg) is a plain register
 * move; BIS r31,r31,r31 is the canonical NOP. */
729 if (ra == rb || ra == 31 || rb == 31) {
730 if (ra == 31 && rc == 31) {
737 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
739 tcg_gen_movi_i64(cpu_ir[rc], 0);
743 gen_arith3(ctx, &gen_op_bis, ra, rb, rc, islit, lit);
748 gen_cmov(ctx, &gen_op_cmpeqz, ra, rb, rc, islit, lit);
752 gen_cmov(ctx, &gen_op_cmpnez, ra, rb, rc, islit, lit);
756 gen_arith3(ctx, &gen_op_ornot, ra, rb, rc, islit, lit);
760 gen_arith3(ctx, &gen_op_xor, ra, rb, rc, islit, lit);
764 gen_cmov(ctx, &gen_op_cmpltz, ra, rb, rc, islit, lit);
768 gen_cmov(ctx, &gen_op_cmpgez, ra, rb, rc, islit, lit);
772 gen_arith3(ctx, &gen_op_eqv, ra, rb, rc, islit, lit);
/* AMASK: literal form folded at translation time via helper_amask. */
776 if (likely(rc != 31)) {
778 tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
780 tcg_gen_helper_1_1(helper_amask, cpu_ir[rc], cpu_ir[rb]);
782 tcg_gen_movi_i64(cpu_ir[rc], 0);
787 gen_cmov(ctx, &gen_op_cmplez, ra, rb, rc, islit, lit);
791 gen_cmov(ctx, &gen_op_cmpgtz, ra, rb, rc, islit, lit);
795 gen_op_load_implver();
797 tcg_gen_mov_i64(cpu_ir[rc], cpu_T[0]);
/* Opcode 0x12: byte-manipulation (mask/extract/insert) and shifts. */
807 gen_arith3(ctx, &gen_op_mskbl, ra, rb, rc, islit, lit);
811 gen_arith3(ctx, &gen_op_extbl, ra, rb, rc, islit, lit);
815 gen_arith3(ctx, &gen_op_insbl, ra, rb, rc, islit, lit);
819 gen_arith3(ctx, &gen_op_mskwl, ra, rb, rc, islit, lit);
823 gen_arith3(ctx, &gen_op_extwl, ra, rb, rc, islit, lit);
827 gen_arith3(ctx, &gen_op_inswl, ra, rb, rc, islit, lit);
831 gen_arith3(ctx, &gen_op_mskll, ra, rb, rc, islit, lit);
835 gen_arith3(ctx, &gen_op_extll, ra, rb, rc, islit, lit);
839 gen_arith3(ctx, &gen_op_insll, ra, rb, rc, islit, lit);
843 gen_arith3(ctx, &gen_op_zap, ra, rb, rc, islit, lit);
847 gen_arith3(ctx, &gen_op_zapnot, ra, rb, rc, islit, lit);
851 gen_arith3(ctx, &gen_op_mskql, ra, rb, rc, islit, lit);
855 gen_arith3(ctx, &gen_op_srl, ra, rb, rc, islit, lit);
859 gen_arith3(ctx, &gen_op_extql, ra, rb, rc, islit, lit);
863 gen_arith3(ctx, &gen_op_sll, ra, rb, rc, islit, lit);
867 gen_arith3(ctx, &gen_op_insql, ra, rb, rc, islit, lit);
871 gen_arith3(ctx, &gen_op_sra, ra, rb, rc, islit, lit);
875 gen_arith3(ctx, &gen_op_mskwh, ra, rb, rc, islit, lit);
879 gen_arith3(ctx, &gen_op_inswh, ra, rb, rc, islit, lit);
883 gen_arith3(ctx, &gen_op_extwh, ra, rb, rc, islit, lit);
887 gen_arith3(ctx, &gen_op_msklh, ra, rb, rc, islit, lit);
891 gen_arith3(ctx, &gen_op_inslh, ra, rb, rc, islit, lit);
895 gen_arith3(ctx, &gen_op_extlh, ra, rb, rc, islit, lit);
899 gen_arith3(ctx, &gen_op_mskqh, ra, rb, rc, islit, lit);
903 gen_arith3(ctx, &gen_op_insqh, ra, rb, rc, islit, lit);
907 gen_arith3(ctx, &gen_op_extqh, ra, rb, rc, islit, lit);
/* Opcode 0x13: multiplies. */
917 gen_arith3(ctx, &gen_op_mull, ra, rb, rc, islit, lit);
921 gen_arith3(ctx, &gen_op_mulq, ra, rb, rc, islit, lit);
925 gen_arith3(ctx, &gen_op_umulh, ra, rb, rc, islit, lit);
929 gen_arith3(ctx, &gen_op_mullv, ra, rb, rc, islit, lit);
933 gen_arith3(ctx, &gen_op_mulqv, ra, rb, rc, islit, lit);
/* Opcode 0x14: ITOF/SQRT group, gated on the FIX extension. */
940 switch (fpfn) { /* f11 & 0x3F */
943 if (!(ctx->amask & AMASK_FIX))
945 gen_itf(ctx, &gen_op_itofs, ra, rc);
949 if (!(ctx->amask & AMASK_FIX))
951 gen_farith2(ctx, &gen_op_sqrtf, rb, rc);
955 if (!(ctx->amask & AMASK_FIX))
957 gen_farith2(ctx, &gen_op_sqrts, rb, rc);
961 if (!(ctx->amask & AMASK_FIX))
964 gen_itf(ctx, &gen_op_itoff, ra, rc);
971 if (!(ctx->amask & AMASK_FIX))
973 gen_itf(ctx, &gen_op_itoft, ra, rc);
977 if (!(ctx->amask & AMASK_FIX))
979 gen_farith2(ctx, &gen_op_sqrtg, rb, rc);
983 if (!(ctx->amask & AMASK_FIX))
985 gen_farith2(ctx, &gen_op_sqrtt, rb, rc);
992 /* VAX floating point */
993 /* XXX: rounding mode and trap are ignored (!) */
994 switch (fpfn) { /* f11 & 0x3F */
997 gen_farith3(ctx, &gen_op_addf, ra, rb, rc);
1001 gen_farith3(ctx, &gen_op_subf, ra, rb, rc);
1005 gen_farith3(ctx, &gen_op_mulf, ra, rb, rc);
1009 gen_farith3(ctx, &gen_op_divf, ra, rb, rc);
1014 gen_farith2(ctx, &gen_op_cvtdg, rb, rc);
1021 gen_farith3(ctx, &gen_op_addg, ra, rb, rc);
1025 gen_farith3(ctx, &gen_op_subg, ra, rb, rc);
1029 gen_farith3(ctx, &gen_op_mulg, ra, rb, rc);
1033 gen_farith3(ctx, &gen_op_divg, ra, rb, rc);
1037 gen_farith3(ctx, &gen_op_cmpgeq, ra, rb, rc);
1041 gen_farith3(ctx, &gen_op_cmpglt, ra, rb, rc);
1045 gen_farith3(ctx, &gen_op_cmpgle, ra, rb, rc);
1049 gen_farith2(ctx, &gen_op_cvtgf, rb, rc);
1054 gen_farith2(ctx, &gen_op_cvtgd, rb, rc);
1061 gen_farith2(ctx, &gen_op_cvtgq, rb, rc);
1065 gen_farith2(ctx, &gen_op_cvtqf, rb, rc);
1069 gen_farith2(ctx, &gen_op_cvtqg, rb, rc);
1076 /* IEEE floating-point */
1077 /* XXX: rounding mode and traps are ignored (!) */
1078 switch (fpfn) { /* f11 & 0x3F */
1081 gen_farith3(ctx, &gen_op_adds, ra, rb, rc);
1085 gen_farith3(ctx, &gen_op_subs, ra, rb, rc);
1089 gen_farith3(ctx, &gen_op_muls, ra, rb, rc);
1093 gen_farith3(ctx, &gen_op_divs, ra, rb, rc);
1097 gen_farith3(ctx, &gen_op_addt, ra, rb, rc);
1101 gen_farith3(ctx, &gen_op_subt, ra, rb, rc);
1105 gen_farith3(ctx, &gen_op_mult, ra, rb, rc);
1109 gen_farith3(ctx, &gen_op_divt, ra, rb, rc);
1113 gen_farith3(ctx, &gen_op_cmptun, ra, rb, rc);
1117 gen_farith3(ctx, &gen_op_cmpteq, ra, rb, rc);
1121 gen_farith3(ctx, &gen_op_cmptlt, ra, rb, rc);
1125 gen_farith3(ctx, &gen_op_cmptle, ra, rb, rc);
1128 /* XXX: incorrect */
1129 if (fn11 == 0x2AC) {
1131 gen_farith2(ctx, &gen_op_cvtst, rb, rc);
1134 gen_farith2(ctx, &gen_op_cvtts, rb, rc);
1139 gen_farith2(ctx, &gen_op_cvttq, rb, rc);
1143 gen_farith2(ctx, &gen_op_cvtqs, rb, rc);
1147 gen_farith2(ctx, &gen_op_cvtqt, rb, rc);
/* Opcode 0x17: FP register moves, sign-copy ops, FPCR access, fcmov. */
1157 gen_farith2(ctx, &gen_op_cvtlq, rb, rc);
/* CPYS with ra==rb is FMOV; fnop when source and dest are both f31. */
1162 if (ra == 31 && rc == 31) {
1167 gen_load_fir(ctx, rb, 0);
1168 gen_store_fir(ctx, rc, 0);
1171 gen_farith3(ctx, &gen_op_cpys, ra, rb, rc);
1176 gen_farith2(ctx, &gen_op_cpysn, rb, rc);
1180 gen_farith2(ctx, &gen_op_cpyse, rb, rc);
1184 gen_load_fir(ctx, ra, 0);
1185 gen_op_store_fpcr();
1190 gen_store_fir(ctx, ra, 0);
1194 gen_fcmov(ctx, &gen_op_cmpfeq, ra, rb, rc);
1198 gen_fcmov(ctx, &gen_op_cmpfne, ra, rb, rc);
1202 gen_fcmov(ctx, &gen_op_cmpflt, ra, rb, rc);
1206 gen_fcmov(ctx, &gen_op_cmpfge, ra, rb, rc);
1210 gen_fcmov(ctx, &gen_op_cmpfle, ra, rb, rc);
1214 gen_fcmov(ctx, &gen_op_cmpfgt, ra, rb, rc);
1218 gen_farith2(ctx, &gen_op_cvtql, rb, rc);
1222 gen_farith2(ctx, &gen_op_cvtqlv, rb, rc);
1226 gen_farith2(ctx, &gen_op_cvtqlsv, rb, rc);
/* Opcode 0x18: MISC group (TRAPB/MB/FETCH/RPCC/...), keyed on disp16. */
1233 switch ((uint16_t)disp16) {
1236 /* No-op. Just exit from the current tb */
1241 /* No-op. Just exit from the current tb */
1264 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
1270 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
1275 /* XXX: TODO: evict tb cache at address rb */
1286 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
1298 /* HW_MFPR (PALcode) */
1299 #if defined (CONFIG_USER_ONLY)
1304 gen_op_mfpr(insn & 0xFF);
1306 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
/* JMP/JSR/RET/JSR_COROUTINE: save return address, jump to rb & ~3. */
1311 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1313 tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
1315 tcg_gen_movi_i64(cpu_pc, 0);
1316 /* Those four jumps only differ by the branch prediction hint */
1334 /* HW_LD (PALcode) */
1335 #if defined (CONFIG_USER_ONLY)
1341 tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
1343 tcg_gen_movi_i64(cpu_T[0], 0);
1344 tcg_gen_movi_i64(cpu_T[1], disp12);
1345 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
1346 switch ((insn >> 12) & 0xF) {
1348 /* Longword physical access */
1352 /* Quadword physical access */
1356 /* Longword physical access with lock */
1360 /* Quadword physical access with lock */
1364 /* Longword virtual PTE fetch */
1365 gen_op_ldl_kernel();
1368 /* Quadword virtual PTE fetch */
1369 gen_op_ldq_kernel();
1378 /* Longword virtual access */
1379 gen_op_ld_phys_to_virt();
1383 /* Quadword virtual access */
1384 gen_op_ld_phys_to_virt();
1388 /* Longword virtual access with protection check */
1392 /* Quadword virtual access with protection check */
1396 /* Longword virtual access with altenate access mode */
1397 gen_op_set_alt_mode();
1398 gen_op_ld_phys_to_virt();
1400 gen_op_restore_mode();
1403 /* Quadword virtual access with altenate access mode */
1404 gen_op_set_alt_mode();
1405 gen_op_ld_phys_to_virt();
1407 gen_op_restore_mode();
1410 /* Longword virtual access with alternate access mode and
1413 gen_op_set_alt_mode();
1415 gen_op_restore_mode();
1418 /* Quadword virtual access with alternate access mode and
1421 gen_op_set_alt_mode();
1423 gen_op_restore_mode();
1427 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
/* Opcode 0x1C: SEXTB/SEXTW (BWX), CTPOP/CTLZ/CTTZ (CIX), MVI ops,
 * FTOIT/FTOIS (FIX).  Literal operands are folded at translate time. */
1434 if (!(ctx->amask & AMASK_BWX))
1436 if (likely(rc != 31)) {
1438 tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
1440 tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
1442 tcg_gen_movi_i64(cpu_ir[rc], 0);
1447 if (!(ctx->amask & AMASK_BWX))
1449 if (likely(rc != 31)) {
1451 tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
1453 tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
1455 tcg_gen_movi_i64(cpu_ir[rc], 0);
1460 if (!(ctx->amask & AMASK_CIX))
1462 if (likely(rc != 31)) {
1464 tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
1466 tcg_gen_helper_1_1(helper_ctpop, cpu_ir[rc], cpu_ir[rb]);
1468 tcg_gen_movi_i64(cpu_ir[rc], 0);
1473 if (!(ctx->amask & AMASK_MVI))
1480 if (!(ctx->amask & AMASK_CIX))
1482 if (likely(rc != 31)) {
1484 tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
1486 tcg_gen_helper_1_1(helper_ctlz, cpu_ir[rc], cpu_ir[rb]);
1488 tcg_gen_movi_i64(cpu_ir[rc], 0);
1493 if (!(ctx->amask & AMASK_CIX))
1495 if (likely(rc != 31)) {
1497 tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
1499 tcg_gen_helper_1_1(helper_cttz, cpu_ir[rc], cpu_ir[rb]);
1501 tcg_gen_movi_i64(cpu_ir[rc], 0);
1506 if (!(ctx->amask & AMASK_MVI))
1513 if (!(ctx->amask & AMASK_MVI))
1520 if (!(ctx->amask & AMASK_MVI))
1527 if (!(ctx->amask & AMASK_MVI))
1534 if (!(ctx->amask & AMASK_MVI))
1541 if (!(ctx->amask & AMASK_MVI))
1548 if (!(ctx->amask & AMASK_MVI))
1555 if (!(ctx->amask & AMASK_MVI))
1562 if (!(ctx->amask & AMASK_MVI))
1569 if (!(ctx->amask & AMASK_MVI))
1576 if (!(ctx->amask & AMASK_MVI))
1583 if (!(ctx->amask & AMASK_MVI))
1590 if (!(ctx->amask & AMASK_FIX))
1592 gen_fti(ctx, &gen_op_ftoit, ra, rb);
1596 if (!(ctx->amask & AMASK_FIX))
1598 gen_fti(ctx, &gen_op_ftois, ra, rb);
1605 /* HW_MTPR (PALcode) */
1606 #if defined (CONFIG_USER_ONLY)
1612 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
1614 tcg_gen_movi_i64(cpu_T[0], 0);
1615 gen_op_mtpr(insn & 0xFF);
1620 /* HW_REI (PALcode) */
1621 #if defined (CONFIG_USER_ONLY)
1631 tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
1633 tcg_gen_movi_i64(cpu_T[0], 0);
/* Sign-extend the low 13 bits of insn via shift-left/shift-right. */
1634 tcg_gen_movi_i64(cpu_T[1], (((int64_t)insn << 51) >> 51));
1635 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
1642 /* HW_ST (PALcode) */
1643 #if defined (CONFIG_USER_ONLY)
1649 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp12);
1651 tcg_gen_movi_i64(cpu_T[0], disp12);
1653 tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
1655 tcg_gen_movi_i64(cpu_T[1], 0);
1656 switch ((insn >> 12) & 0xF) {
1658 /* Longword physical access */
1662 /* Quadword physical access */
1666 /* Longword physical access with lock */
1670 /* Quadword physical access with lock */
1674 /* Longword virtual access */
1675 gen_op_st_phys_to_virt();
1679 /* Quadword virtual access */
1680 gen_op_st_phys_to_virt();
1702 /* Longword virtual access with alternate access mode */
1703 gen_op_set_alt_mode();
1704 gen_op_st_phys_to_virt();
1706 gen_op_restore_mode();
1709 /* Quadword virtual access with alternate access mode */
1710 gen_op_set_alt_mode();
1711 gen_op_st_phys_to_virt();
1713 gen_op_restore_mode();
/* FP loads/stores (VAX F/G and IEEE S/T formats). */
1728 gen_load_fmem(ctx, &gen_ldf, ra, rb, disp16);
1736 gen_load_fmem(ctx, &gen_ldg, ra, rb, disp16);
1743 gen_load_fmem(ctx, &gen_lds, ra, rb, disp16);
1747 gen_load_fmem(ctx, &gen_ldt, ra, rb, disp16);
1752 gen_store_fmem(ctx, &gen_stf, ra, rb, disp16);
1760 gen_store_fmem(ctx, &gen_stg, ra, rb, disp16);
1767 gen_store_fmem(ctx, &gen_sts, ra, rb, disp16);
1771 gen_store_fmem(ctx, &gen_stt, ra, rb, disp16);
/* Integer loads/stores, including load-locked / store-conditional. */
1775 gen_load_mem(ctx, &gen_ldl, ra, rb, disp16, 0);
1779 gen_load_mem(ctx, &gen_ldq, ra, rb, disp16, 0);
1783 gen_load_mem(ctx, &gen_ldl_l, ra, rb, disp16, 0);
1787 gen_load_mem(ctx, &gen_ldq_l, ra, rb, disp16, 0);
1791 gen_store_mem(ctx, &gen_stl, ra, rb, disp16, 0);
1795 gen_store_mem(ctx, &gen_stq, ra, rb, disp16, 0);
1799 gen_store_mem(ctx, &gen_stl_c, ra, rb, disp16, 0);
1803 gen_store_mem(ctx, &gen_stq_c, ra, rb, disp16, 0);
/* BR/BSR: save return pc in ra, unconditionally branch pc + 4*disp21. */
1808 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1809 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
1814 gen_fbcond(ctx, &gen_op_cmpfeq, ra, disp16);
1819 gen_fbcond(ctx, &gen_op_cmpflt, ra, disp16);
1824 gen_fbcond(ctx, &gen_op_cmpfle, ra, disp16);
1830 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1831 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
1836 gen_fbcond(ctx, &gen_op_cmpfne, ra, disp16);
1841 gen_fbcond(ctx, &gen_op_cmpfge, ra, disp16);
1846 gen_fbcond(ctx, &gen_op_cmpfgt, ra, disp16);
/* Integer conditional branches. */
1851 gen_bcond(ctx, &gen_op_cmplbc, ra, disp16);
1856 gen_bcond(ctx, &gen_op_cmpeqz, ra, disp16);
1861 gen_bcond(ctx, &gen_op_cmpltz, ra, disp16);
1866 gen_bcond(ctx, &gen_op_cmplez, ra, disp16);
1871 gen_bcond(ctx, &gen_op_cmplbs, ra, disp16);
1876 gen_bcond(ctx, &gen_op_cmpnez, ra, disp16);
1881 gen_bcond(ctx, &gen_op_cmpgez, ra, disp16);
1886 gen_bcond(ctx, &gen_op_cmpgtz, ra, disp16);
/* Main translation loop: translate guest instructions starting at the
 * TB's pc until translate_one requests a stop, a page boundary is
 * crossed, the insn budget (cflags CF_COUNT_MASK) is exhausted, or a
 * breakpoint/single-step hits.  When the (missing) search_pc flag is
 * set, it also records pc/icount per op in the gen_opc_* arrays for
 * later pc reconstruction by gen_pc_load.
 * NOTE(review): many lines (declarations of ret/lj/num_insns/insn,
 * #else branches, loop braces) are missing from this excerpt. */
1898 static always_inline void gen_intermediate_code_internal (CPUState *env,
1899 TranslationBlock *tb,
1902 #if defined ALPHA_DEBUG_DISAS
1903 static int insn_count;
1905 DisasContext ctx, *ctxp = &ctx;
1906 target_ulong pc_start;
1908 uint16_t *gen_opc_end;
1915 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
1917 ctx.amask = env->amask;
/* System emulation derives mem_idx from PS<4:3> and PAL mode from the
 * low bit of EXC_ADDR; user-only branch is in missing lines. */
1918 #if defined (CONFIG_USER_ONLY)
1921 ctx.mem_idx = ((env->ps >> 3) & 3);
1922 ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
1925 max_insns = tb->cflags & CF_COUNT_MASK;
1927 max_insns = CF_COUNT_MASK;
1930 for (ret = 0; ret == 0;) {
/* Stop on a breakpoint at the current pc with an EXCP_DEBUG. */
1931 if (env->nb_breakpoints > 0) {
1932 for(j = 0; j < env->nb_breakpoints; j++) {
1933 if (env->breakpoints[j] == ctx.pc) {
1934 gen_excp(&ctx, EXCP_DEBUG, 0);
/* search_pc bookkeeping: map each generated op back to a guest pc. */
1940 j = gen_opc_ptr - gen_opc_buf;
1944 gen_opc_instr_start[lj++] = 0;
1945 gen_opc_pc[lj] = ctx.pc;
1946 gen_opc_instr_start[lj] = 1;
1947 gen_opc_icount[lj] = num_insns;
1950 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
1952 #if defined ALPHA_DEBUG_DISAS
1954 if (logfile != NULL) {
1955 fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
1956 ctx.pc, ctx.mem_idx);
1959 insn = ldl_code(ctx.pc);
1960 #if defined ALPHA_DEBUG_DISAS
1962 if (logfile != NULL) {
1963 fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
1968 ret = translate_one(ctxp, insn);
1971 /* if we reach a page boundary or are single stepping, stop
1974 if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
1975 (env->singlestep_enabled) ||
1976 num_insns >= max_insns) {
1979 #if defined (DO_SINGLE_STEP)
/* ret values 1 and 3 already committed pc; otherwise store it now. */
1983 if (ret != 1 && ret != 3) {
1984 tcg_gen_movi_i64(cpu_pc, ctx.pc);
1986 #if defined (DO_TB_FLUSH)
1987 tcg_gen_helper_0_0(helper_tb_flush);
1989 if (tb->cflags & CF_LAST_IO)
1991 /* Generate the return instruction */
1993 gen_icount_end(tb, num_insns);
1994 *gen_opc_ptr = INDEX_op_end;
1996 j = gen_opc_ptr - gen_opc_buf;
1999 gen_opc_instr_start[lj++] = 0;
2001 tb->size = ctx.pc - pc_start;
2002 tb->icount = num_insns;
2004 #if defined ALPHA_DEBUG_DISAS
2005 if (loglevel & CPU_LOG_TB_CPU) {
2006 cpu_dump_state(env, logfile, fprintf, 0);
2008 if (loglevel & CPU_LOG_TB_IN_ASM) {
2009 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
2010 target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
2011 fprintf(logfile, "\n");
/* Public entry point: translate a TB (search_pc = 0). */
2016 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
2018 gen_intermediate_code_internal(env, tb, 0);
/* Public entry point for pc search (exception unwinding): search_pc = 1. */
2021 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
2023 gen_intermediate_code_internal(env, tb, 1);
/* Allocate and initialise a CPUAlphaState.  The cpu_model argument is
 * currently ignored (implver is hard-coded to 21064/21066 family, see
 * XXX below).  System emulation initialises the internal processor
 * registers; several PCB-derived IPRs are left commented out pending a
 * working ldq_raw at init time.
 * NOTE(review): allocation-failure check, env_init call and the return
 * statement are on lines missing from this excerpt. */
2026 CPUAlphaState * cpu_alpha_init (const char *cpu_model)
2031 env = qemu_mallocz(sizeof(CPUAlphaState));
2035 alpha_translate_init();
2037 /* XXX: should not be hardcoded */
2038 env->implver = IMPLVER_2106x;
2040 #if defined (CONFIG_USER_ONLY)
2044 /* Initialize IPR */
2045 hwpcb = env->ipr[IPR_PCBB];
2046 env->ipr[IPR_ASN] = 0;
2047 env->ipr[IPR_ASTEN] = 0;
2048 env->ipr[IPR_ASTSR] = 0;
2049 env->ipr[IPR_DATFX] = 0;
2051 // env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
2052 // env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
2053 // env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
2054 // env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
2055 env->ipr[IPR_FEN] = 0;
2056 env->ipr[IPR_IPL] = 31;
2057 env->ipr[IPR_MCES] = 0;
2058 env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
2059 // env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
2060 env->ipr[IPR_SISR] = 0;
2061 env->ipr[IPR_VIRBND] = -1ULL;
/* Restore the guest pc for the op at pc_pos, using the gen_opc_pc table
 * filled in by gen_intermediate_code_pc (used when an exception occurs
 * mid-TB and the guest state must be re-synchronised). */
2066 void gen_pc_load(CPUState *env, TranslationBlock *tb,
2067 unsigned long searched_pc, int pc_pos, void *puc)
2069 env->pc = gen_opc_pc[pc_pos];