2 * Alpha emulation cpu translation for qemu.
4 * Copyright (c) 2007 Jocelyn Mayer
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
30 #include "qemu-common.h"
32 #define DO_SINGLE_STEP
34 #define ALPHA_DEBUG_DISAS
37 typedef struct DisasContext DisasContext;
41 #if !defined (CONFIG_USER_ONLY)
47 /* global register indexes */
49 static TCGv cpu_ir[31];
52 /* dyngen register indexes */
56 static char cpu_reg_names[5*31];
58 #include "gen-icount.h"
/* One-time setup of the TCG global registers used by the Alpha translator:
 * the CPU env pointer, the T0..T2 scratch temporaries (in host registers
 * when they fit, otherwise backed by CPUState memory slots), the 31 integer
 * registers ir0..ir30, and the pc.  Guarded by done_init so repeated CPU
 * creation only registers the globals once. */
60 static void alpha_translate_init(void)
64     static int done_init = 0;
69     cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
/* If a guest long does not fit in a host long, keep T0..T2 in CPUState
 * memory; otherwise map them directly onto host registers AREG1..AREG3. */
71 #if TARGET_LONG_BITS > HOST_LONG_BITS
72     cpu_T[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
73                                   offsetof(CPUState, t0), "T0");
74     cpu_T[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
75                                   offsetof(CPUState, t1), "T1");
76     cpu_T[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
77                                   offsetof(CPUState, t2), "T2");
79     cpu_T[0] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG1, "T0");
80     cpu_T[1] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG2, "T1");
81     cpu_T[2] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG3, "T2");
/* Register the 31 architectural integer registers (ir31 is the hardwired
 * zero register and is handled specially elsewhere, so it gets no global).
 * The names "ir0".."ir30" live in the cpu_reg_names backing store. */
85     for (i = 0; i < 31; i++) {
86         sprintf(p, "ir%d", i);
87         cpu_ir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
88                                        offsetof(CPUState, ir[i]), p);
92     cpu_pc = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
93                                 offsetof(CPUState, pc), "pc");
95     /* register helpers */
97 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
/* Emit nothing (or an explicit nop when GENERATE_NOP is defined).  Used as
 * the do-nothing target for operations on the zero registers ir31/fir31. */
103 static always_inline void gen_op_nop (void)
105 #if defined(GENERATE_NOP)
/* GEN32(func, NAME) builds a 32-entry dispatch table NAME##_table from the
 * per-register ops NAME##0 .. NAME##31 and a wrapper func(n) that indexes
 * it.  This turns the register number into a direct call at translate time. */
110 #define GEN32(func, NAME)                                                     \
111 static GenOpFunc *NAME ## _table [32] = {                                     \
112 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3,                                   \
113 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7,                                   \
114 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11,                                 \
115 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15,                               \
116 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19,                               \
117 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23,                               \
118 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27,                               \
119 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31,                               \
121 static always_inline void func (int n)                                        \
123     NAME ## _table[n]();                                                      \
/* Alpha r31/f31 always read as zero and ignore writes.  Before instantiating
 * the GEN32 tables, alias the index-31 entries so that: loads from fir31
 * reset the FP temporary instead of reading a register, and stores/cmovs to
 * ir31/fir31 become nops.  The GEN32 expansions below then pick these up via
 * token pasting (NAME ## 31). */
127 /* Special hacks for ir31 */
128 #define gen_op_cmov_ir31 gen_op_nop
129 GEN32(gen_op_cmov_ir, gen_op_cmov_ir);
132 /* Special hacks for fir31 */
133 #define gen_op_load_FT0_fir31 gen_op_reset_FT0
134 #define gen_op_load_FT1_fir31 gen_op_reset_FT1
135 #define gen_op_load_FT2_fir31 gen_op_reset_FT2
136 #define gen_op_store_FT0_fir31 gen_op_nop
137 #define gen_op_store_FT1_fir31 gen_op_nop
138 #define gen_op_store_FT2_fir31 gen_op_nop
139 #define gen_op_cmov_fir31 gen_op_nop
140 GEN32(gen_op_load_FT0_fir, gen_op_load_FT0_fir);
141 GEN32(gen_op_load_FT1_fir, gen_op_load_FT1_fir);
142 GEN32(gen_op_load_FT2_fir, gen_op_load_FT2_fir);
143 GEN32(gen_op_store_FT0_fir, gen_op_store_FT0_fir);
144 GEN32(gen_op_store_FT1_fir, gen_op_store_FT1_fir);
145 GEN32(gen_op_store_FT2_fir, gen_op_store_FT2_fir);
146 GEN32(gen_op_cmov_fir, gen_op_cmov_fir);
/* Load FP register firn into FP temporary FTn (Tn selects FT0/FT1/FT2).
 * fir31 reads as +0.0 via the gen_op_load_FTx_fir31 aliases above. */
148 static always_inline void gen_load_fir (DisasContext *ctx, int firn, int Tn)
152         gen_op_load_FT0_fir(firn);
155         gen_op_load_FT1_fir(firn);
158         gen_op_load_FT2_fir(firn);
/* Store FP temporary FTn back into FP register firn; a store to fir31 is a
 * nop (zero register), handled by the index-31 table aliases. */
163 static always_inline void gen_store_fir (DisasContext *ctx, int firn, int Tn)
167         gen_op_store_FT0_fir(firn);
170         gen_op_store_FT1_fir(firn);
173         gen_op_store_FT2_fir(firn);
/* Memory access op tables, indexed by ctx->mem_idx.  User-mode emulation has
 * a single "raw" access mode; system emulation selects among the four Alpha
 * privilege levels (kernel/executive/supervisor/user). */
179 #if defined(CONFIG_USER_ONLY)
180 #define OP_LD_TABLE(width)                                                    \
181 static GenOpFunc *gen_op_ld##width[] = {                                      \
182     &gen_op_ld##width##_raw,                                                  \
184 #define OP_ST_TABLE(width)                                                    \
185 static GenOpFunc *gen_op_st##width[] = {                                      \
186     &gen_op_st##width##_raw,                                                  \
189 #define OP_LD_TABLE(width)                                                    \
190 static GenOpFunc *gen_op_ld##width[] = {                                      \
191     &gen_op_ld##width##_kernel,                                               \
192     &gen_op_ld##width##_executive,                                            \
193     &gen_op_ld##width##_supervisor,                                           \
194     &gen_op_ld##width##_user,                                                 \
196 #define OP_ST_TABLE(width)                                                    \
197 static GenOpFunc *gen_op_st##width[] = {                                      \
198     &gen_op_st##width##_kernel,                                               \
199     &gen_op_st##width##_executive,                                            \
200     &gen_op_st##width##_supervisor,                                           \
201     &gen_op_st##width##_user,                                                 \
/* GEN_LD/GEN_ST instantiate the access table for one width and a wrapper
 * (gen_ld##width / gen_st##width) that dispatches on the current privilege
 * level in ctx->mem_idx. */
205 #define GEN_LD(width)                                                         \
206 OP_LD_TABLE(width);                                                           \
207 static always_inline void gen_ld##width (DisasContext *ctx)                   \
209     (*gen_op_ld##width[ctx->mem_idx])();                                      \
212 #define GEN_ST(width)                                                         \
213 OP_ST_TABLE(width);                                                           \
214 static always_inline void gen_st##width (DisasContext *ctx)                   \
216     (*gen_op_st##width[ctx->mem_idx])();                                      \
/* Dead code kept for reference: conditional-branch emission that would need
 * to pass the 64-bit target pc, either whole or split into two 32-bit
 * halves.  Compiled out with #if 0. */
234 #if 0 /* currently unused */
245 static always_inline void _gen_op_bcond (DisasContext *ctx)
247 #if 0 // Qemu does not know how to do this...
248     gen_op_bcond(ctx->pc);
250     gen_op_bcond(ctx->pc >> 32, ctx->pc);
/* Raise a guest exception: first flush the translation-time pc into the
 * cpu_pc global so the exception handler sees the precise faulting PC, then
 * emit the exception op with the given code and error_code. */
254 static always_inline void gen_excp (DisasContext *ctx,
255                                     int exception, int error_code)
257     tcg_gen_movi_i64(cpu_pc, ctx->pc);
258     gen_op_excp(exception, error_code);
/* Raise the illegal-opcode exception (OPCDEC) for an invalid instruction. */
261 static always_inline void gen_invalid (DisasContext *ctx)
263     gen_excp(ctx, EXCP_OPCDEC, 0);
/* Emit an integer load: compute the effective address rb + disp16 into T0
 * (rb == 31 means just the displacement), optionally clear the low 3 bits
 * (used by LDQ_U-style unaligned accesses), run the width-specific load op,
 * and move the result from T1 into ra.  A load into r31 with zero
 * displacement is special-cased up front — presumably treated as a
 * nop/prefetch; the elided body should be confirmed against the full file. */
266 static always_inline void gen_load_mem (DisasContext *ctx,
267                                         void (*gen_load_op)(DisasContext *ctx),
268                                         int ra, int rb, int32_t disp16,
271     if (ra == 31 && disp16 == 0) {
276             tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
278             tcg_gen_movi_i64(cpu_T[0], disp16);
280             tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
283             tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
/* Emit an integer store: effective address rb + disp16 into T0 (rb == 31 ->
 * displacement only), optional 8-byte alignment mask for the *_u forms, the
 * value from ra (or constant 0 when ra == 31, the zero register) into T1,
 * then the width-specific store op. */
287 static always_inline void gen_store_mem (DisasContext *ctx,
288                                          void (*gen_store_op)(DisasContext *ctx),
289                                          int ra, int rb, int32_t disp16,
293         tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
295         tcg_gen_movi_i64(cpu_T[0], disp16);
297         tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
299         tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
301         tcg_gen_movi_i64(cpu_T[1], 0);
302     (*gen_store_op)(ctx);
/* Emit a floating-point load: address rb + disp16 in T0, run the FP load op
 * (result in FT1), then store FT1 into FP register ra. */
305 static always_inline void gen_load_fmem (DisasContext *ctx,
306                                          void (*gen_load_fop)(DisasContext *ctx),
307                                          int ra, int rb, int32_t disp16)
310         tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
312         tcg_gen_movi_i64(cpu_T[0], disp16);
313     (*gen_load_fop)(ctx);
314     gen_store_fir(ctx, ra, 1);
/* Emit a floating-point store: address rb + disp16 in T0, load FP register
 * ra into FT1, then run the FP store op. */
317 static always_inline void gen_store_fmem (DisasContext *ctx,
318                                           void (*gen_store_fop)(DisasContext *ctx),
319                                           int ra, int rb, int32_t disp16)
322         tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
324         tcg_gen_movi_i64(cpu_T[0], disp16);
325     gen_load_fir(ctx, ra, 1);
326     (*gen_store_fop)(ctx);
/* Emit an integer conditional branch: branch target (pc + shifted
 * displacement) in T1, the tested register value (or 0 for r31) in T0, then
 * the condition op.
 * NOTE(review): (disp16 << 2) left-shifts a possibly-negative int32, which
 * is undefined behavior in C — safer would be disp16 * 4 or an unsigned
 * shift followed by the sign-extending cast. */
329 static always_inline void gen_bcond (DisasContext *ctx,
330                                      void (*gen_test_op)(void),
331                                      int ra, int32_t disp16)
333     tcg_gen_movi_i64(cpu_T[1], ctx->pc + (int64_t)(disp16 << 2));
335         tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
337         tcg_gen_movi_i64(cpu_T[0], 0);
/* Emit a floating-point conditional branch: target pc in T1, FP register ra
 * loaded into FT0, then the FP condition op.  Same (disp16 << 2) signed
 * left-shift caveat as gen_bcond above. */
342 static always_inline void gen_fbcond (DisasContext *ctx,
343                                       void (*gen_test_op)(void),
344                                       int ra, int32_t disp16)
346     tcg_gen_movi_i64(cpu_T[1], ctx->pc + (int64_t)(disp16 << 2));
347     gen_load_fir(ctx, ra, 0);
/* Two-operand integer arithmetic: operand into T0 — the 8-bit literal when
 * islit, else register rb (0 for r31) — run the op, result from T0 into rc. */
352 static always_inline void gen_arith2 (DisasContext *ctx,
353                                       void (*gen_arith_op)(void),
354                                       int rb, int rc, int islit, uint8_t lit)
357         tcg_gen_movi_i64(cpu_T[0], lit);
359         tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
361         tcg_gen_movi_i64(cpu_T[0], 0);
364         tcg_gen_mov_i64(cpu_ir[rc], cpu_T[0]);
/* Three-operand integer arithmetic: ra (or 0 for r31) into T0, second
 * operand into T1 — literal when islit, else rb (0 for r31) — run the op,
 * result from T0 into rc (store skipped for the zero register). */
367 static always_inline void gen_arith3 (DisasContext *ctx,
368                                       void (*gen_arith_op)(void),
369                                       int ra, int rb, int rc,
370                                       int islit, uint8_t lit)
373         tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
375         tcg_gen_movi_i64(cpu_T[0], 0);
377         tcg_gen_movi_i64(cpu_T[1], lit);
379         tcg_gen_mov_i64(cpu_T[1], cpu_ir[rb]);
381         tcg_gen_movi_i64(cpu_T[1], 0);
384         tcg_gen_mov_i64(cpu_ir[rc], cpu_T[0]);
/* Conditional move: test value from ra in T0, move source in T1 (literal or
 * rb), then the test op followed by the cmov into rc (via the gen_op_cmov_ir
 * table, which makes rc == 31 a nop). */
387 static always_inline void gen_cmov (DisasContext *ctx,
388                                     void (*gen_test_op)(void),
389                                     int ra, int rb, int rc,
390                                     int islit, uint8_t lit)
393         tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
395         tcg_gen_movi_i64(cpu_T[0], 0);
397         tcg_gen_movi_i64(cpu_T[1], lit);
399         tcg_gen_mov_i64(cpu_T[1], cpu_ir[rb]);
401         tcg_gen_movi_i64(cpu_T[1], 0);
/* Two-operand FP arithmetic: rb -> FT0, run the op, FT0 -> rc. */
406 static always_inline void gen_farith2 (DisasContext *ctx,
407                                        void (*gen_arith_fop)(void),
410     gen_load_fir(ctx, rb, 0);
412     gen_store_fir(ctx, rc, 0);
/* Three-operand FP arithmetic: ra -> FT0, rb -> FT1, run the op, FT0 -> rc. */
415 static always_inline void gen_farith3 (DisasContext *ctx,
416                                        void (*gen_arith_fop)(void),
417                                        int ra, int rb, int rc)
419     gen_load_fir(ctx, ra, 0);
420     gen_load_fir(ctx, rb, 1);
422     gen_store_fir(ctx, rc, 0);
/* FP conditional move: test value ra -> FT0, move source rb -> FT1, then the
 * FP test op (the cmov into rc follows in the elided tail). */
425 static always_inline void gen_fcmov (DisasContext *ctx,
426                                      void (*gen_test_fop)(void),
427                                      int ra, int rb, int rc)
429     gen_load_fir(ctx, ra, 0);
430     gen_load_fir(ctx, rb, 1);
/* FP-to-integer register move (FTOIT/FTOIS family): FP source rc -> FT0,
 * run the move op, result from T0 into integer register ra. */
435 static always_inline void gen_fti (DisasContext *ctx,
436                                    void (*gen_move_fop)(void),
439     gen_load_fir(ctx, rc, 0);
442         tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
/* Integer-to-FP register move (ITOFS/ITOFF/ITOFT family): integer ra (0 for
 * r31) into T0, run the move op, result FT0 stored into FP register rc. */
445 static always_inline void gen_itf (DisasContext *ctx,
446                                    void (*gen_move_fop)(void),
450         tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
452         tcg_gen_movi_i64(cpu_T[0], 0);
454     gen_store_fir(ctx, rc, 0);
/* Scaled add/sub helpers for S4ADDx/S8ADDx/S4SUBx/S8SUBx: shift T0 left by
 * 2 (x4) or 3 (x8), then add/subtract T1.  The *l variants are followed by
 * a 32-bit sign-extension op in the full file; only the quadword variants
 * show the explicit add/sub here. */
457 static always_inline void gen_s4addl (void)
459     tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 2);
463 static always_inline void gen_s4subl (void)
465     tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 2);
469 static always_inline void gen_s8addl (void)
471     tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 3);
475 static always_inline void gen_s8subl (void)
477     tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 3);
481 static always_inline void gen_s4addq (void)
483     tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 2);
484     tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
487 static always_inline void gen_s4subq (void)
489     tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 2);
490     tcg_gen_sub_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
493 static always_inline void gen_s8addq (void)
495     tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 3);
496     tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
499 static always_inline void gen_s8subq (void)
501     tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 3);
502     tcg_gen_sub_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
/* AMASK implementation helper: masks the requested feature bits against the
 * CPU's architecture-extension mask (body elided in this chunk). */
505 static always_inline void gen_amask (void)
/* Decode and translate a single 32-bit Alpha instruction at ctx->pc.
 * Returns a status code consumed by the translation loop (0 = continue,
 * nonzero = stop the TB; the exact codes are defined in the elided tail).
 * Structure: field extraction, debug logging, then a big switch on the
 * 6-bit opcode dispatching to the gen_* emitters above. */
511 static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
514     int32_t disp21, disp16, disp12;
516     uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
520     /* Decode all instruction fields */
/* Register numbers, literal flag/value, PALcode operand, and the various
 * sign-extended displacements (21-bit branch, 16-bit memory, 12-bit HW). */
522     ra = (insn >> 21) & 0x1F;
523     rb = (insn >> 16) & 0x1F;
525     sbz = (insn >> 13) & 0x07;
526     islit = (insn >> 12) & 1;
527     lit = (insn >> 13) & 0xFF;
528     palcode = insn & 0x03FFFFFF;
529     disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
530     disp16 = (int16_t)(insn & 0x0000FFFF);
531     disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
532     fn16 = insn & 0x0000FFFF;
533     fn11 = (insn >> 5) & 0x000007FF;
535     fn7 = (insn >> 5) & 0x0000007F;
536     fn2 = (insn >> 5) & 0x00000003;
538 #if defined ALPHA_DEBUG_DISAS
539     if (logfile != NULL) {
540         fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
541                 opc, ra, rb, rc, disp16);
/* CALL_PAL: 0x80..0xBF are unprivileged calls; < 0x40 are privileged and
 * only legal outside user mode (mem_idx check); anything else is invalid. */
547         if (palcode >= 0x80 && palcode < 0xC0) {
548             /* Unprivileged PAL call */
549             gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
550 #if !defined (CONFIG_USER_ONLY)
551         } else if (palcode < 0x40) {
552             /* Privileged PAL code */
553             if (ctx->mem_idx & 1)
556                 gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
559             /* Invalid PAL call */
/* LDA / LDAH: address arithmetic into ra (LDAH shifts disp16 left 16). */
589             tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
591             tcg_gen_movi_i64(cpu_ir[ra], disp16);
598             tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
600             tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
/* Byte/word loads and stores gated on the BWX architecture extension. */
605         if (!(ctx->amask & AMASK_BWX))
607         gen_load_mem(ctx, &gen_ldbu, ra, rb, disp16, 0);
611         gen_load_mem(ctx, &gen_ldq_u, ra, rb, disp16, 1);
615         if (!(ctx->amask & AMASK_BWX))
617         gen_load_mem(ctx, &gen_ldwu, ra, rb, disp16, 0);
621         if (!(ctx->amask & AMASK_BWX))
623         gen_store_mem(ctx, &gen_stw, ra, rb, disp16, 0);
627         if (!(ctx->amask & AMASK_BWX))
629         gen_store_mem(ctx, &gen_stb, ra, rb, disp16, 0);
633         gen_store_mem(ctx, &gen_stq_u, ra, rb, disp16, 1);
/* Opcode 0x10: integer arithmetic, dispatched on fn7. */
639             gen_arith3(ctx, &gen_op_addl, ra, rb, rc, islit, lit);
643             gen_arith3(ctx, &gen_s4addl, ra, rb, rc, islit, lit);
647             gen_arith3(ctx, &gen_op_subl, ra, rb, rc, islit, lit);
651             gen_arith3(ctx, &gen_s4subl, ra, rb, rc, islit, lit);
655             gen_arith3(ctx, &gen_op_cmpbge, ra, rb, rc, islit, lit);
659             gen_arith3(ctx, &gen_s8addl, ra, rb, rc, islit, lit);
663             gen_arith3(ctx, &gen_s8subl, ra, rb, rc, islit, lit);
667             gen_arith3(ctx, &gen_op_cmpult, ra, rb, rc, islit, lit);
671             gen_arith3(ctx, &gen_op_addq, ra, rb, rc, islit, lit);
675             gen_arith3(ctx, &gen_s4addq, ra, rb, rc, islit, lit);
679             gen_arith3(ctx, &gen_op_subq, ra, rb, rc, islit, lit);
683             gen_arith3(ctx, &gen_s4subq, ra, rb, rc, islit, lit);
687             gen_arith3(ctx, &gen_op_cmpeq, ra, rb, rc, islit, lit);
691             gen_arith3(ctx, &gen_s8addq, ra, rb, rc, islit, lit);
695             gen_arith3(ctx, &gen_s8subq, ra, rb, rc, islit, lit);
699             gen_arith3(ctx, &gen_op_cmpule, ra, rb, rc, islit, lit);
703             gen_arith3(ctx, &gen_op_addlv, ra, rb, rc, islit, lit);
707             gen_arith3(ctx, &gen_op_sublv, ra, rb, rc, islit, lit);
711             gen_arith3(ctx, &gen_op_cmplt, ra, rb, rc, islit, lit);
715             gen_arith3(ctx, &gen_op_addqv, ra, rb, rc, islit, lit);
719             gen_arith3(ctx, &gen_op_subqv, ra, rb, rc, islit, lit);
723             gen_arith3(ctx, &gen_op_cmple, ra, rb, rc, islit, lit);
/* Opcode 0x11: logical ops and integer conditional moves. */
733             gen_arith3(ctx, &gen_op_and, ra, rb, rc, islit, lit);
737             gen_arith3(ctx, &gen_op_bic, ra, rb, rc, islit, lit);
741             gen_cmov(ctx, &gen_op_cmplbs, ra, rb, rc, islit, lit);
745             gen_cmov(ctx, &gen_op_cmplbc, ra, rb, rc, islit, lit);
/* BIS fast paths: mov rb->rc / clear rc when one source is the zero reg. */
749             if (ra == rb || ra == 31 || rb == 31) {
750                 if (ra == 31 && rc == 31) {
757                         tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
759                         tcg_gen_movi_i64(cpu_ir[rc], 0);
763                 gen_arith3(ctx, &gen_op_bis, ra, rb, rc, islit, lit);
768             gen_cmov(ctx, &gen_op_cmpeqz, ra, rb, rc, islit, lit);
772             gen_cmov(ctx, &gen_op_cmpnez, ra, rb, rc, islit, lit);
776             gen_arith3(ctx, &gen_op_ornot, ra, rb, rc, islit, lit);
780             gen_arith3(ctx, &gen_op_xor, ra, rb, rc, islit, lit);
784             gen_cmov(ctx, &gen_op_cmpltz, ra, rb, rc, islit, lit);
788             gen_cmov(ctx, &gen_op_cmpgez, ra, rb, rc, islit, lit);
792             gen_arith3(ctx, &gen_op_eqv, ra, rb, rc, islit, lit);
796             gen_arith2(ctx, &gen_amask, rb, rc, islit, lit);
800             gen_cmov(ctx, &gen_op_cmplez, ra, rb, rc, islit, lit);
804             gen_cmov(ctx, &gen_op_cmpgtz, ra, rb, rc, islit, lit);
808             gen_op_load_implver();
810                 tcg_gen_mov_i64(cpu_ir[rc], cpu_T[0]);
/* Opcode 0x12: byte-manipulation (mask/extract/insert) and shifts. */
820             gen_arith3(ctx, &gen_op_mskbl, ra, rb, rc, islit, lit);
824             gen_arith3(ctx, &gen_op_extbl, ra, rb, rc, islit, lit);
828             gen_arith3(ctx, &gen_op_insbl, ra, rb, rc, islit, lit);
832             gen_arith3(ctx, &gen_op_mskwl, ra, rb, rc, islit, lit);
836             gen_arith3(ctx, &gen_op_extwl, ra, rb, rc, islit, lit);
840             gen_arith3(ctx, &gen_op_inswl, ra, rb, rc, islit, lit);
844             gen_arith3(ctx, &gen_op_mskll, ra, rb, rc, islit, lit);
848             gen_arith3(ctx, &gen_op_extll, ra, rb, rc, islit, lit);
852             gen_arith3(ctx, &gen_op_insll, ra, rb, rc, islit, lit);
856             gen_arith3(ctx, &gen_op_zap, ra, rb, rc, islit, lit);
860             gen_arith3(ctx, &gen_op_zapnot, ra, rb, rc, islit, lit);
864             gen_arith3(ctx, &gen_op_mskql, ra, rb, rc, islit, lit);
868             gen_arith3(ctx, &gen_op_srl, ra, rb, rc, islit, lit);
872             gen_arith3(ctx, &gen_op_extql, ra, rb, rc, islit, lit);
876             gen_arith3(ctx, &gen_op_sll, ra, rb, rc, islit, lit);
880             gen_arith3(ctx, &gen_op_insql, ra, rb, rc, islit, lit);
884             gen_arith3(ctx, &gen_op_sra, ra, rb, rc, islit, lit);
888             gen_arith3(ctx, &gen_op_mskwh, ra, rb, rc, islit, lit);
892             gen_arith3(ctx, &gen_op_inswh, ra, rb, rc, islit, lit);
896             gen_arith3(ctx, &gen_op_extwh, ra, rb, rc, islit, lit);
900             gen_arith3(ctx, &gen_op_msklh, ra, rb, rc, islit, lit);
904             gen_arith3(ctx, &gen_op_inslh, ra, rb, rc, islit, lit);
908             gen_arith3(ctx, &gen_op_extlh, ra, rb, rc, islit, lit);
912             gen_arith3(ctx, &gen_op_mskqh, ra, rb, rc, islit, lit);
916             gen_arith3(ctx, &gen_op_insqh, ra, rb, rc, islit, lit);
920             gen_arith3(ctx, &gen_op_extqh, ra, rb, rc, islit, lit);
/* Opcode 0x13: multiplies. */
930             gen_arith3(ctx, &gen_op_mull, ra, rb, rc, islit, lit);
934             gen_arith3(ctx, &gen_op_mulq, ra, rb, rc, islit, lit);
938             gen_arith3(ctx, &gen_op_umulh, ra, rb, rc, islit, lit);
942             gen_arith3(ctx, &gen_op_mullv, ra, rb, rc, islit, lit);
946             gen_arith3(ctx, &gen_op_mulqv, ra, rb, rc, islit, lit);
/* Opcode 0x14: sqrt and int<->float moves, gated on the FIX extension. */
953         switch (fpfn) { /* f11 & 0x3F */
956             if (!(ctx->amask & AMASK_FIX))
958             gen_itf(ctx, &gen_op_itofs, ra, rc);
962             if (!(ctx->amask & AMASK_FIX))
964             gen_farith2(ctx, &gen_op_sqrtf, rb, rc);
968             if (!(ctx->amask & AMASK_FIX))
970             gen_farith2(ctx, &gen_op_sqrts, rb, rc);
974             if (!(ctx->amask & AMASK_FIX))
977             gen_itf(ctx, &gen_op_itoff, ra, rc);
984             if (!(ctx->amask & AMASK_FIX))
986             gen_itf(ctx, &gen_op_itoft, ra, rc);
990             if (!(ctx->amask & AMASK_FIX))
992             gen_farith2(ctx, &gen_op_sqrtg, rb, rc);
996             if (!(ctx->amask & AMASK_FIX))
998             gen_farith2(ctx, &gen_op_sqrtt, rb, rc);
1005         /* VAX floating point */
1006         /* XXX: rounding mode and trap are ignored (!) */
1007         switch (fpfn) { /* f11 & 0x3F */
1010             gen_farith3(ctx, &gen_op_addf, ra, rb, rc);
1014             gen_farith3(ctx, &gen_op_subf, ra, rb, rc);
1018             gen_farith3(ctx, &gen_op_mulf, ra, rb, rc);
1022             gen_farith3(ctx, &gen_op_divf, ra, rb, rc);
1027             gen_farith2(ctx, &gen_op_cvtdg, rb, rc);
1034             gen_farith3(ctx, &gen_op_addg, ra, rb, rc);
1038             gen_farith3(ctx, &gen_op_subg, ra, rb, rc);
1042             gen_farith3(ctx, &gen_op_mulg, ra, rb, rc);
1046             gen_farith3(ctx, &gen_op_divg, ra, rb, rc);
1050             gen_farith3(ctx, &gen_op_cmpgeq, ra, rb, rc);
1054             gen_farith3(ctx, &gen_op_cmpglt, ra, rb, rc);
1058             gen_farith3(ctx, &gen_op_cmpgle, ra, rb, rc);
1062             gen_farith2(ctx, &gen_op_cvtgf, rb, rc);
1067             gen_farith2(ctx, &gen_op_cvtgd, rb, rc);
1074             gen_farith2(ctx, &gen_op_cvtgq, rb, rc);
1078             gen_farith2(ctx, &gen_op_cvtqf, rb, rc);
1082             gen_farith2(ctx, &gen_op_cvtqg, rb, rc);
1089         /* IEEE floating-point */
1090         /* XXX: rounding mode and traps are ignored (!) */
1091         switch (fpfn) { /* f11 & 0x3F */
1094             gen_farith3(ctx, &gen_op_adds, ra, rb, rc);
1098             gen_farith3(ctx, &gen_op_subs, ra, rb, rc);
1102             gen_farith3(ctx, &gen_op_muls, ra, rb, rc);
1106             gen_farith3(ctx, &gen_op_divs, ra, rb, rc);
1110             gen_farith3(ctx, &gen_op_addt, ra, rb, rc);
1114             gen_farith3(ctx, &gen_op_subt, ra, rb, rc);
1118             gen_farith3(ctx, &gen_op_mult, ra, rb, rc);
1122             gen_farith3(ctx, &gen_op_divt, ra, rb, rc);
1126             gen_farith3(ctx, &gen_op_cmptun, ra, rb, rc);
1130             gen_farith3(ctx, &gen_op_cmpteq, ra, rb, rc);
1134             gen_farith3(ctx, &gen_op_cmptlt, ra, rb, rc);
1138             gen_farith3(ctx, &gen_op_cmptle, ra, rb, rc);
1141             /* XXX: incorrect */
1142             if (fn11 == 0x2AC) {
1144                 gen_farith2(ctx, &gen_op_cvtst, rb, rc);
1147                 gen_farith2(ctx, &gen_op_cvtts, rb, rc);
1152             gen_farith2(ctx, &gen_op_cvttq, rb, rc);
1156             gen_farith2(ctx, &gen_op_cvtqs, rb, rc);
1160             gen_farith2(ctx, &gen_op_cvtqt, rb, rc);
/* Opcode 0x17: FP copy-sign, FPCR access, FP cmov, and cvtql variants. */
1170             gen_farith2(ctx, &gen_op_cvtlq, rb, rc);
1175             if (ra == 31 && rc == 31) {
1180                 gen_load_fir(ctx, rb, 0);
1181                 gen_store_fir(ctx, rc, 0);
1184                 gen_farith3(ctx, &gen_op_cpys, ra, rb, rc);
1189             gen_farith2(ctx, &gen_op_cpysn, rb, rc);
1193             gen_farith2(ctx, &gen_op_cpyse, rb, rc);
1197             gen_load_fir(ctx, ra, 0);
1198             gen_op_store_fpcr();
1203             gen_store_fir(ctx, ra, 0);
1207             gen_fcmov(ctx, &gen_op_cmpfeq, ra, rb, rc);
1211             gen_fcmov(ctx, &gen_op_cmpfne, ra, rb, rc);
1215             gen_fcmov(ctx, &gen_op_cmpflt, ra, rb, rc);
1219             gen_fcmov(ctx, &gen_op_cmpfge, ra, rb, rc);
1223             gen_fcmov(ctx, &gen_op_cmpfle, ra, rb, rc);
1227             gen_fcmov(ctx, &gen_op_cmpfgt, ra, rb, rc);
1231             gen_farith2(ctx, &gen_op_cvtql, rb, rc);
1235             gen_farith2(ctx, &gen_op_cvtqlv, rb, rc);
1239             gen_farith2(ctx, &gen_op_cvtqlsv, rb, rc);
/* Opcode 0x18: memory barriers / misc (TRAPB, MB, FETCH...), on disp16. */
1246         switch ((uint16_t)disp16) {
1249             /* No-op. Just exit from the current tb */
1254             /* No-op. Just exit from the current tb */
1277                 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
1283                 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
1288             /* XXX: TODO: evict tb cache at address rb */
1299                 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
1311         /* HW_MFPR (PALcode) */
1312 #if defined (CONFIG_USER_ONLY)
1317         gen_op_mfpr(insn & 0xFF);
1319             tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
/* JMP/JSR/RET/JSR_COROUTINE: save return pc in ra, jump to rb & ~3. */
1324             tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1326             tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
1328             tcg_gen_movi_i64(cpu_pc, 0);
1329         /* Those four jumps only differ by the branch prediction hint */
1347         /* HW_LD (PALcode) */
1348 #if defined (CONFIG_USER_ONLY)
1354             tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
1356             tcg_gen_movi_i64(cpu_T[0], 0);
1357         tcg_gen_movi_i64(cpu_T[1], disp12);
1358         tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
/* HW_LD access-type field selects physical/virtual, lock, PTE, and
 * alternate-access-mode variants. */
1359         switch ((insn >> 12) & 0xF) {
1361             /* Longword physical access */
1365             /* Quadword physical access */
1369             /* Longword physical access with lock */
1373             /* Quadword physical access with lock */
1377             /* Longword virtual PTE fetch */
1378             gen_op_ldl_kernel();
1381             /* Quadword virtual PTE fetch */
1382             gen_op_ldq_kernel();
1391             /* Longword virtual access */
1392             gen_op_ld_phys_to_virt();
1396             /* Quadword virtual access */
1397             gen_op_ld_phys_to_virt();
1401             /* Longword virtual access with protection check */
1405             /* Quadword virtual access with protection check */
1409             /* Longword virtual access with altenate access mode */
1410             gen_op_set_alt_mode();
1411             gen_op_ld_phys_to_virt();
1413             gen_op_restore_mode();
1416             /* Quadword virtual access with altenate access mode */
1417             gen_op_set_alt_mode();
1418             gen_op_ld_phys_to_virt();
1420             gen_op_restore_mode();
1423             /* Longword virtual access with alternate access mode and
1426             gen_op_set_alt_mode();
1428             gen_op_restore_mode();
1431             /* Quadword virtual access with alternate access mode and
1434             gen_op_set_alt_mode();
1436             gen_op_restore_mode();
1440             tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
/* Opcode 0x1C: sign-extension, count ops (CIX), and multimedia (MVI). */
1447             if (!(ctx->amask & AMASK_BWX))
1449             gen_arith2(ctx, &gen_op_sextb, rb, rc, islit, lit);
1453             if (!(ctx->amask & AMASK_BWX))
1455             gen_arith2(ctx, &gen_op_sextw, rb, rc, islit, lit);
1459             if (!(ctx->amask & AMASK_CIX))
1461             gen_arith2(ctx, &gen_op_ctpop, rb, rc, 0, 0);
1465             if (!(ctx->amask & AMASK_MVI))
1472             if (!(ctx->amask & AMASK_CIX))
1474             gen_arith2(ctx, &gen_op_ctlz, rb, rc, 0, 0);
1478             if (!(ctx->amask & AMASK_CIX))
1480             gen_arith2(ctx, &gen_op_cttz, rb, rc, 0, 0);
1484             if (!(ctx->amask & AMASK_MVI))
1491             if (!(ctx->amask & AMASK_MVI))
1498             if (!(ctx->amask & AMASK_MVI))
1505             if (!(ctx->amask & AMASK_MVI))
1512             if (!(ctx->amask & AMASK_MVI))
1519             if (!(ctx->amask & AMASK_MVI))
1526             if (!(ctx->amask & AMASK_MVI))
1533             if (!(ctx->amask & AMASK_MVI))
1540             if (!(ctx->amask & AMASK_MVI))
1547             if (!(ctx->amask & AMASK_MVI))
1554             if (!(ctx->amask & AMASK_MVI))
1561             if (!(ctx->amask & AMASK_MVI))
1568             if (!(ctx->amask & AMASK_FIX))
1570             gen_fti(ctx, &gen_op_ftoit, ra, rb);
1574             if (!(ctx->amask & AMASK_FIX))
1576             gen_fti(ctx, &gen_op_ftois, ra, rb);
1583         /* HW_MTPR (PALcode) */
1584 #if defined (CONFIG_USER_ONLY)
1590             tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
1592             tcg_gen_movi_i64(cpu_T[0], 0);
1593         gen_op_mtpr(insn & 0xFF);
1598         /* HW_REI (PALcode) */
1599 #if defined (CONFIG_USER_ONLY)
1609             tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
1611             tcg_gen_movi_i64(cpu_T[0], 0);
1612         tcg_gen_movi_i64(cpu_T[1], (((int64_t)insn << 51) >> 51));
1613         tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
1620         /* HW_ST (PALcode) */
1621 #if defined (CONFIG_USER_ONLY)
1627             tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp12);
1629             tcg_gen_movi_i64(cpu_T[0], disp12);
1631             tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
1633             tcg_gen_movi_i64(cpu_T[1], 0);
1634         switch ((insn >> 12) & 0xF) {
1636             /* Longword physical access */
1640             /* Quadword physical access */
1644             /* Longword physical access with lock */
1648             /* Quadword physical access with lock */
1652             /* Longword virtual access */
1653             gen_op_st_phys_to_virt();
1657             /* Quadword virtual access */
1658             gen_op_st_phys_to_virt();
1680             /* Longword virtual access with alternate access mode */
1681             gen_op_set_alt_mode();
1682             gen_op_st_phys_to_virt();
1684             gen_op_restore_mode();
1687             /* Quadword virtual access with alternate access mode */
1688             gen_op_set_alt_mode();
1689             gen_op_st_phys_to_virt();
1691             gen_op_restore_mode();
/* FP and integer loads/stores (opcodes 0x20-0x2F). */
1706         gen_load_fmem(ctx, &gen_ldf, ra, rb, disp16);
1714         gen_load_fmem(ctx, &gen_ldg, ra, rb, disp16);
1721         gen_load_fmem(ctx, &gen_lds, ra, rb, disp16);
1725         gen_load_fmem(ctx, &gen_ldt, ra, rb, disp16);
1730         gen_store_fmem(ctx, &gen_stf, ra, rb, disp16);
1738         gen_store_fmem(ctx, &gen_stg, ra, rb, disp16);
1745         gen_store_fmem(ctx, &gen_sts, ra, rb, disp16);
1749         gen_store_fmem(ctx, &gen_stt, ra, rb, disp16);
1753         gen_load_mem(ctx, &gen_ldl, ra, rb, disp16, 0);
1757         gen_load_mem(ctx, &gen_ldq, ra, rb, disp16, 0);
1761         gen_load_mem(ctx, &gen_ldl_l, ra, rb, disp16, 0);
1765         gen_load_mem(ctx, &gen_ldq_l, ra, rb, disp16, 0);
1769         gen_store_mem(ctx, &gen_stl, ra, rb, disp16, 0);
1773         gen_store_mem(ctx, &gen_stq, ra, rb, disp16, 0);
1777         gen_store_mem(ctx, &gen_stl_c, ra, rb, disp16, 0);
1781         gen_store_mem(ctx, &gen_stq_c, ra, rb, disp16, 0);
/* BR/BSR: unconditional branches saving the return pc in ra.
 * NOTE(review): (disp21 << 2) has the same signed-left-shift UB caveat as
 * gen_bcond. */
1786         tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1787         tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
/* FP conditional branches (FBEQ/FBLT/... dispatch on the FP compare op).
 * NOTE(review): these pass disp16 where the Alpha branch format carries a
 * 21-bit displacement (disp21) — confirm against the full file / later
 * upstream fixes. */
1792         gen_fbcond(ctx, &gen_op_cmpfeq, ra, disp16);
1797         gen_fbcond(ctx, &gen_op_cmpflt, ra, disp16);
1802         gen_fbcond(ctx, &gen_op_cmpfle, ra, disp16);
1808         tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1809         tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
1814         gen_fbcond(ctx, &gen_op_cmpfne, ra, disp16);
1819         gen_fbcond(ctx, &gen_op_cmpfge, ra, disp16);
1824         gen_fbcond(ctx, &gen_op_cmpfgt, ra, disp16);
1829         gen_bcond(ctx, &gen_op_cmplbc, ra, disp16);
1834         gen_bcond(ctx, &gen_op_cmpeqz, ra, disp16);
1839         gen_bcond(ctx, &gen_op_cmpltz, ra, disp16);
1844         gen_bcond(ctx, &gen_op_cmplez, ra, disp16);
1849         gen_bcond(ctx, &gen_op_cmplbs, ra, disp16);
1854         gen_bcond(ctx, &gen_op_cmpnez, ra, disp16);
1859         gen_bcond(ctx, &gen_op_cmpgez, ra, disp16);
1864         gen_bcond(ctx, &gen_op_cmpgtz, ra, disp16);
/* Translate one translation block: repeatedly fetch guest instructions from
 * ctx.pc and call translate_one until a block-ending condition (branch,
 * exception, page boundary, single-step, or max_insns reached).  When
 * search_pc is set, records per-op pc/icount info into the gen_opc_* arrays
 * so a host pc can later be mapped back to a guest pc (see gen_pc_load). */
1876 static always_inline void gen_intermediate_code_internal (CPUState *env,
1877                                                           TranslationBlock *tb,
1880 #if defined ALPHA_DEBUG_DISAS
1881     static int insn_count;
1883     DisasContext ctx, *ctxp = &ctx;
1884     target_ulong pc_start;
1886     uint16_t *gen_opc_end;
1893     gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
1895     ctx.amask = env->amask;
/* mem_idx: user-only builds use the single raw mode; system builds derive
 * the privilege level from PS and track PAL mode from EXC_ADDR bit 0. */
1896 #if defined (CONFIG_USER_ONLY)
1899     ctx.mem_idx = ((env->ps >> 3) & 3);
1900     ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
1903     max_insns = tb->cflags & CF_COUNT_MASK;
1905         max_insns = CF_COUNT_MASK;
1908     for (ret = 0; ret == 0;) {
/* Stop translation with a debug exception at any registered breakpoint. */
1909         if (env->nb_breakpoints > 0) {
1910             for(j = 0; j < env->nb_breakpoints; j++) {
1911                 if (env->breakpoints[j] == ctx.pc) {
1912                     gen_excp(&ctx, EXCP_DEBUG, 0);
/* search_pc bookkeeping: pad skipped slots, then record pc/icount for the
 * op about to be generated. */
1918             j = gen_opc_ptr - gen_opc_buf;
1922                     gen_opc_instr_start[lj++] = 0;
1923             gen_opc_pc[lj] = ctx.pc;
1924             gen_opc_instr_start[lj] = 1;
1925             gen_opc_icount[lj] = num_insns;
1928         if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
1930 #if defined ALPHA_DEBUG_DISAS
1932         if (logfile != NULL) {
1933             fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
1934                     ctx.pc, ctx.mem_idx);
1937         insn = ldl_code(ctx.pc);
1938 #if defined ALPHA_DEBUG_DISAS
1940         if (logfile != NULL) {
1941             fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
1946         ret = translate_one(ctxp, insn);
1949         /* if we reach a page boundary or are single stepping, stop
1952         if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
1953             (env->singlestep_enabled) ||
1954             num_insns >= max_insns) {
1957 #if defined (DO_SINGLE_STEP)
/* ret values 1 and 3 already wrote cpu_pc (branch/exception paths);
 * otherwise flush the final pc here. */
1961     if (ret != 1 && ret != 3) {
1962         tcg_gen_movi_i64(cpu_pc, ctx.pc);
1964 #if defined (DO_TB_FLUSH)
1965     tcg_gen_helper_0_0(helper_tb_flush);
1967     if (tb->cflags & CF_LAST_IO)
1969     /* Generate the return instruction */
1971     gen_icount_end(tb, num_insns);
1972     *gen_opc_ptr = INDEX_op_end;
1974         j = gen_opc_ptr - gen_opc_buf;
1977             gen_opc_instr_start[lj++] = 0;
1979         tb->size = ctx.pc - pc_start;
1980         tb->icount = num_insns;
1982 #if defined ALPHA_DEBUG_DISAS
1983     if (loglevel & CPU_LOG_TB_CPU) {
1984         cpu_dump_state(env, logfile, fprintf, 0);
1986     if (loglevel & CPU_LOG_TB_IN_ASM) {
1987         fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
1988         target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
1989         fprintf(logfile, "\n");
/* Public entry point: translate a TB without pc-search bookkeeping. */
1994 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
1996     gen_intermediate_code_internal(env, tb, 0);
/* Public entry point: translate a TB with pc-search bookkeeping enabled,
 * used when restoring guest state from a host fault address. */
1999 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
2001     gen_intermediate_code_internal(env, tb, 1);
/* Allocate and initialize an Alpha CPU state.  cpu_model is currently
 * ignored: implver is hard-coded to 21064-class (IMPLVER_2106x).  System
 * builds also initialize the internal processor registers; several PCB
 * loads remain commented out pending a real HWPCB.  Returns the new
 * CPUAlphaState (NULL-check on the allocation is presumably in the elided
 * lines — confirm against the full file). */
2004 CPUAlphaState * cpu_alpha_init (const char *cpu_model)
2009     env = qemu_mallocz(sizeof(CPUAlphaState));
2013     alpha_translate_init();
2015     /* XXX: should not be hardcoded */
2016     env->implver = IMPLVER_2106x;
2018 #if defined (CONFIG_USER_ONLY)
2022     /* Initialize IPR */
2023     hwpcb = env->ipr[IPR_PCBB];
2024     env->ipr[IPR_ASN] = 0;
2025     env->ipr[IPR_ASTEN] = 0;
2026     env->ipr[IPR_ASTSR] = 0;
2027     env->ipr[IPR_DATFX] = 0;
2029     // env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
2030     // env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
2031     // env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
2032     // env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
2033     env->ipr[IPR_FEN] = 0;
2034     env->ipr[IPR_IPL] = 31;
2035     env->ipr[IPR_MCES] = 0;
2036     env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
2037     // env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
2038     env->ipr[IPR_SISR] = 0;
2039     env->ipr[IPR_VIRBND] = -1ULL;
/* Restore the guest pc after an exception inside a TB: copy the pc recorded
 * at opcode index pc_pos (filled in by the search_pc translation pass) back
 * into the CPU state. */
2044 void gen_pc_load(CPUState *env, TranslationBlock *tb,
2045                 unsigned long searched_pc, int pc_pos, void *puc)
2047     env->pc = gen_opc_pc[pc_pos];