2 * Alpha emulation cpu translation for qemu.
4 * Copyright (c) 2007 Jocelyn Mayer
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
28 #include "host-utils.h"
31 #include "qemu-common.h"
33 #define DO_SINGLE_STEP
35 #define ALPHA_DEBUG_DISAS
38 typedef struct DisasContext DisasContext;
42 #if !defined (CONFIG_USER_ONLY)
48 /* global register indexes */
50 static TCGv cpu_ir[31];
53 /* dyngen register indexes */
57 static char cpu_reg_names[5*31];
59 #include "gen-icount.h"
61 static void alpha_translate_init(void)
65 static int done_init = 0;
70 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
72 #if TARGET_LONG_BITS > HOST_LONG_BITS
73 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
74 offsetof(CPUState, t0), "T0");
75 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
76 offsetof(CPUState, t1), "T1");
77 cpu_T[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
78 offsetof(CPUState, t2), "T2");
80 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG1, "T0");
81 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG2, "T1");
82 cpu_T[2] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG3, "T2");
86 for (i = 0; i < 31; i++) {
87 sprintf(p, "ir%d", i);
88 cpu_ir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
89 offsetof(CPUState, ir[i]), p);
93 cpu_pc = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
94 offsetof(CPUState, pc), "pc");
96 /* register helpers */
98 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
104 static always_inline void gen_op_nop (void)
106 #if defined(GENERATE_NOP)
111 #define GEN32(func, NAME) \
112 static GenOpFunc *NAME ## _table [32] = { \
113 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
114 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
115 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
116 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
117 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
118 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
119 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
120 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
122 static always_inline void func (int n) \
124 NAME ## _table[n](); \
128 /* Special hacks for ir31 */
129 #define gen_op_cmov_ir31 gen_op_nop
130 GEN32(gen_op_cmov_ir, gen_op_cmov_ir);
133 /* Special hacks for fir31 */
134 #define gen_op_load_FT0_fir31 gen_op_reset_FT0
135 #define gen_op_load_FT1_fir31 gen_op_reset_FT1
136 #define gen_op_load_FT2_fir31 gen_op_reset_FT2
137 #define gen_op_store_FT0_fir31 gen_op_nop
138 #define gen_op_store_FT1_fir31 gen_op_nop
139 #define gen_op_store_FT2_fir31 gen_op_nop
140 #define gen_op_cmov_fir31 gen_op_nop
141 GEN32(gen_op_load_FT0_fir, gen_op_load_FT0_fir);
142 GEN32(gen_op_load_FT1_fir, gen_op_load_FT1_fir);
143 GEN32(gen_op_load_FT2_fir, gen_op_load_FT2_fir);
144 GEN32(gen_op_store_FT0_fir, gen_op_store_FT0_fir);
145 GEN32(gen_op_store_FT1_fir, gen_op_store_FT1_fir);
146 GEN32(gen_op_store_FT2_fir, gen_op_store_FT2_fir);
147 GEN32(gen_op_cmov_fir, gen_op_cmov_fir);
149 static always_inline void gen_load_fir (DisasContext *ctx, int firn, int Tn)
153 gen_op_load_FT0_fir(firn);
156 gen_op_load_FT1_fir(firn);
159 gen_op_load_FT2_fir(firn);
164 static always_inline void gen_store_fir (DisasContext *ctx, int firn, int Tn)
168 gen_op_store_FT0_fir(firn);
171 gen_op_store_FT1_fir(firn);
174 gen_op_store_FT2_fir(firn);
180 #if defined(CONFIG_USER_ONLY)
181 #define OP_LD_TABLE(width) \
182 static GenOpFunc *gen_op_ld##width[] = { \
183 &gen_op_ld##width##_raw, \
185 #define OP_ST_TABLE(width) \
186 static GenOpFunc *gen_op_st##width[] = { \
187 &gen_op_st##width##_raw, \
190 #define OP_LD_TABLE(width) \
191 static GenOpFunc *gen_op_ld##width[] = { \
192 &gen_op_ld##width##_kernel, \
193 &gen_op_ld##width##_executive, \
194 &gen_op_ld##width##_supervisor, \
195 &gen_op_ld##width##_user, \
197 #define OP_ST_TABLE(width) \
198 static GenOpFunc *gen_op_st##width[] = { \
199 &gen_op_st##width##_kernel, \
200 &gen_op_st##width##_executive, \
201 &gen_op_st##width##_supervisor, \
202 &gen_op_st##width##_user, \
206 #define GEN_LD(width) \
207 OP_LD_TABLE(width); \
208 static always_inline void gen_ld##width (DisasContext *ctx) \
210 (*gen_op_ld##width[ctx->mem_idx])(); \
213 #define GEN_ST(width) \
214 OP_ST_TABLE(width); \
215 static always_inline void gen_st##width (DisasContext *ctx) \
217 (*gen_op_st##width[ctx->mem_idx])(); \
235 #if 0 /* currently unused */
246 static always_inline void _gen_op_bcond (DisasContext *ctx)
248 #if 0 // Qemu does not know how to do this...
249 gen_op_bcond(ctx->pc);
251 gen_op_bcond(ctx->pc >> 32, ctx->pc);
255 static always_inline void gen_excp (DisasContext *ctx,
256 int exception, int error_code)
258 tcg_gen_movi_i64(cpu_pc, ctx->pc);
259 gen_op_excp(exception, error_code);
262 static always_inline void gen_invalid (DisasContext *ctx)
264 gen_excp(ctx, EXCP_OPCDEC, 0);
267 static always_inline void gen_load_mem (DisasContext *ctx,
268 void (*gen_load_op)(DisasContext *ctx),
269 int ra, int rb, int32_t disp16,
272 if (ra == 31 && disp16 == 0) {
277 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
279 tcg_gen_movi_i64(cpu_T[0], disp16);
281 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
284 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
288 static always_inline void gen_store_mem (DisasContext *ctx,
289 void (*gen_store_op)(DisasContext *ctx),
290 int ra, int rb, int32_t disp16,
294 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
296 tcg_gen_movi_i64(cpu_T[0], disp16);
298 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
300 tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
302 tcg_gen_movi_i64(cpu_T[1], 0);
303 (*gen_store_op)(ctx);
306 static always_inline void gen_load_fmem (DisasContext *ctx,
307 void (*gen_load_fop)(DisasContext *ctx),
308 int ra, int rb, int32_t disp16)
311 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
313 tcg_gen_movi_i64(cpu_T[0], disp16);
314 (*gen_load_fop)(ctx);
315 gen_store_fir(ctx, ra, 1);
318 static always_inline void gen_store_fmem (DisasContext *ctx,
319 void (*gen_store_fop)(DisasContext *ctx),
320 int ra, int rb, int32_t disp16)
323 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
325 tcg_gen_movi_i64(cpu_T[0], disp16);
326 gen_load_fir(ctx, ra, 1);
327 (*gen_store_fop)(ctx);
330 static always_inline void gen_bcond (DisasContext *ctx,
331 void (*gen_test_op)(void),
332 int ra, int32_t disp16)
334 tcg_gen_movi_i64(cpu_T[1], ctx->pc + (int64_t)(disp16 << 2));
336 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
338 tcg_gen_movi_i64(cpu_T[0], 0);
343 static always_inline void gen_fbcond (DisasContext *ctx,
344 void (*gen_test_op)(void),
345 int ra, int32_t disp16)
347 tcg_gen_movi_i64(cpu_T[1], ctx->pc + (int64_t)(disp16 << 2));
348 gen_load_fir(ctx, ra, 0);
353 static always_inline void gen_arith3 (DisasContext *ctx,
354 void (*gen_arith_op)(void),
355 int ra, int rb, int rc,
356 int islit, uint8_t lit)
359 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
361 tcg_gen_movi_i64(cpu_T[0], 0);
363 tcg_gen_movi_i64(cpu_T[1], lit);
365 tcg_gen_mov_i64(cpu_T[1], cpu_ir[rb]);
367 tcg_gen_movi_i64(cpu_T[1], 0);
370 tcg_gen_mov_i64(cpu_ir[rc], cpu_T[0]);
373 static always_inline void gen_cmov (DisasContext *ctx,
374 void (*gen_test_op)(void),
375 int ra, int rb, int rc,
376 int islit, uint8_t lit)
379 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
381 tcg_gen_movi_i64(cpu_T[0], 0);
383 tcg_gen_movi_i64(cpu_T[1], lit);
385 tcg_gen_mov_i64(cpu_T[1], cpu_ir[rb]);
387 tcg_gen_movi_i64(cpu_T[1], 0);
392 static always_inline void gen_farith2 (DisasContext *ctx,
393 void (*gen_arith_fop)(void),
396 gen_load_fir(ctx, rb, 0);
398 gen_store_fir(ctx, rc, 0);
401 static always_inline void gen_farith3 (DisasContext *ctx,
402 void (*gen_arith_fop)(void),
403 int ra, int rb, int rc)
405 gen_load_fir(ctx, ra, 0);
406 gen_load_fir(ctx, rb, 1);
408 gen_store_fir(ctx, rc, 0);
411 static always_inline void gen_fcmov (DisasContext *ctx,
412 void (*gen_test_fop)(void),
413 int ra, int rb, int rc)
415 gen_load_fir(ctx, ra, 0);
416 gen_load_fir(ctx, rb, 1);
421 static always_inline void gen_fti (DisasContext *ctx,
422 void (*gen_move_fop)(void),
425 gen_load_fir(ctx, rc, 0);
428 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
431 static always_inline void gen_itf (DisasContext *ctx,
432 void (*gen_move_fop)(void),
436 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
438 tcg_gen_movi_i64(cpu_T[0], 0);
440 gen_store_fir(ctx, rc, 0);
443 static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
446 int32_t disp21, disp16, disp12;
448 uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
452 /* Decode all instruction fields */
454 ra = (insn >> 21) & 0x1F;
455 rb = (insn >> 16) & 0x1F;
457 sbz = (insn >> 13) & 0x07;
458 islit = (insn >> 12) & 1;
459 lit = (insn >> 13) & 0xFF;
460 palcode = insn & 0x03FFFFFF;
461 disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
462 disp16 = (int16_t)(insn & 0x0000FFFF);
463 disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
464 fn16 = insn & 0x0000FFFF;
465 fn11 = (insn >> 5) & 0x000007FF;
467 fn7 = (insn >> 5) & 0x0000007F;
468 fn2 = (insn >> 5) & 0x00000003;
470 #if defined ALPHA_DEBUG_DISAS
471 if (logfile != NULL) {
472 fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
473 opc, ra, rb, rc, disp16);
479 if (palcode >= 0x80 && palcode < 0xC0) {
480 /* Unprivileged PAL call */
481 gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
482 #if !defined (CONFIG_USER_ONLY)
483 } else if (palcode < 0x40) {
484 /* Privileged PAL code */
485 if (ctx->mem_idx & 1)
488 gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
491 /* Invalid PAL call */
521 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
523 tcg_gen_movi_i64(cpu_ir[ra], disp16);
530 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
532 tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
537 if (!(ctx->amask & AMASK_BWX))
539 gen_load_mem(ctx, &gen_ldbu, ra, rb, disp16, 0);
543 gen_load_mem(ctx, &gen_ldq_u, ra, rb, disp16, 1);
547 if (!(ctx->amask & AMASK_BWX))
549 gen_load_mem(ctx, &gen_ldwu, ra, rb, disp16, 0);
553 if (!(ctx->amask & AMASK_BWX))
555 gen_store_mem(ctx, &gen_stw, ra, rb, disp16, 0);
559 if (!(ctx->amask & AMASK_BWX))
561 gen_store_mem(ctx, &gen_stb, ra, rb, disp16, 0);
565 gen_store_mem(ctx, &gen_stq_u, ra, rb, disp16, 1);
571 if (likely(rc != 31)) {
574 tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
575 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
576 } else if (rb != 31) {
577 tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
578 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
580 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[ra]);
583 tcg_gen_movi_i64(cpu_ir[rc], (int32_t)lit);
585 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
587 tcg_gen_movi_i64(cpu_ir[rc], 0);
593 if (likely(rc != 31)) {
595 if (islit || rb != 31) {
596 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
597 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
599 tcg_gen_addi_i64(tmp, tmp, lit);
601 tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
602 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
605 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 2);
606 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
610 tcg_gen_movi_i64(cpu_ir[rc], lit);
612 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
614 tcg_gen_movi_i64(cpu_ir[rc], 0);
620 if (likely(rc != 31)) {
623 tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
624 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
625 } else if (rb != 31) {
626 tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
627 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
629 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[ra]);
632 tcg_gen_movi_i64(cpu_ir[rc], -lit);
634 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
635 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
637 tcg_gen_movi_i64(cpu_ir[rc], 0);
643 if (likely(rc != 31)) {
645 if (islit || rb != 31) {
646 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
647 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
649 tcg_gen_subi_i64(tmp, tmp, lit);
651 tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
652 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
655 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 2);
656 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
660 tcg_gen_movi_i64(cpu_ir[rc], -lit);
662 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
663 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
665 tcg_gen_movi_i64(cpu_ir[rc], 0);
671 gen_arith3(ctx, &gen_op_cmpbge, ra, rb, rc, islit, lit);
675 if (likely(rc != 31)) {
677 if (islit || rb != 31) {
678 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
679 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
681 tcg_gen_addi_i64(tmp, tmp, lit);
683 tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
684 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
687 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 3);
688 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
692 tcg_gen_movi_i64(cpu_ir[rc], lit);
694 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
696 tcg_gen_movi_i64(cpu_ir[rc], 0);
702 if (likely(rc != 31)) {
704 if (islit || rb != 31) {
705 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
706 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
708 tcg_gen_subi_i64(tmp, tmp, lit);
710 tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
711 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
714 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 3);
715 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
719 tcg_gen_movi_i64(cpu_ir[rc], -lit);
721 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
722 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
724 tcg_gen_movi_i64(cpu_ir[rc], 0);
730 gen_arith3(ctx, &gen_op_cmpult, ra, rb, rc, islit, lit);
734 if (likely(rc != 31)) {
737 tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
739 tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
741 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
744 tcg_gen_movi_i64(cpu_ir[rc], lit);
746 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
748 tcg_gen_movi_i64(cpu_ir[rc], 0);
754 if (likely(rc != 31)) {
756 if (islit || rb != 31) {
757 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
758 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
760 tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
762 tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
765 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 2);
768 tcg_gen_movi_i64(cpu_ir[rc], lit);
770 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
772 tcg_gen_movi_i64(cpu_ir[rc], 0);
778 if (likely(rc != 31)) {
781 tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
783 tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
785 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
788 tcg_gen_movi_i64(cpu_ir[rc], -lit);
790 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
792 tcg_gen_movi_i64(cpu_ir[rc], 0);
798 if (likely(rc != 31)) {
800 if (islit || rb != 31) {
801 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
802 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
804 tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
806 tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
809 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 2);
812 tcg_gen_movi_i64(cpu_ir[rc], -lit);
814 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
816 tcg_gen_movi_i64(cpu_ir[rc], 0);
822 gen_arith3(ctx, &gen_op_cmpeq, ra, rb, rc, islit, lit);
826 if (likely(rc != 31)) {
828 if (islit || rb != 31) {
829 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
830 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
832 tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
834 tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
837 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 3);
840 tcg_gen_movi_i64(cpu_ir[rc], lit);
842 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
844 tcg_gen_movi_i64(cpu_ir[rc], 0);
850 if (likely(rc != 31)) {
852 if (islit || rb != 31) {
853 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
854 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
856 tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
858 tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
861 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 3);
864 tcg_gen_movi_i64(cpu_ir[rc], -lit);
866 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
868 tcg_gen_movi_i64(cpu_ir[rc], 0);
874 gen_arith3(ctx, &gen_op_cmpule, ra, rb, rc, islit, lit);
878 gen_arith3(ctx, &gen_op_addlv, ra, rb, rc, islit, lit);
882 gen_arith3(ctx, &gen_op_sublv, ra, rb, rc, islit, lit);
886 gen_arith3(ctx, &gen_op_cmplt, ra, rb, rc, islit, lit);
890 gen_arith3(ctx, &gen_op_addqv, ra, rb, rc, islit, lit);
894 gen_arith3(ctx, &gen_op_subqv, ra, rb, rc, islit, lit);
898 gen_arith3(ctx, &gen_op_cmple, ra, rb, rc, islit, lit);
908 if (likely(rc != 31)) {
909 if (ra == 31 || (rb == 31 && !islit))
910 tcg_gen_movi_i64(cpu_ir[rc], 0);
912 tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
914 tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
919 if (likely(rc != 31)) {
922 tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
924 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
925 tcg_gen_not_i64(tmp, cpu_ir[rb]);
926 tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], tmp);
929 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
931 tcg_gen_movi_i64(cpu_ir[rc], 0);
936 gen_cmov(ctx, &gen_op_cmplbs, ra, rb, rc, islit, lit);
940 gen_cmov(ctx, &gen_op_cmplbc, ra, rb, rc, islit, lit);
944 if (likely(rc != 31)) {
947 tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
949 tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
951 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
954 tcg_gen_movi_i64(cpu_ir[rc], lit);
956 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
958 tcg_gen_movi_i64(cpu_ir[rc], 0);
964 gen_cmov(ctx, &gen_op_cmpeqz, ra, rb, rc, islit, lit);
968 gen_cmov(ctx, &gen_op_cmpnez, ra, rb, rc, islit, lit);
972 if (likely(rc != 31)) {
973 if (rb == 31 && !islit)
974 tcg_gen_movi_i64(cpu_ir[rc], ~0);
977 tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
979 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
980 tcg_gen_not_i64(tmp, cpu_ir[rb]);
981 tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], tmp);
986 tcg_gen_movi_i64(cpu_ir[rc], ~lit);
988 tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
994 if (likely(rc != 31)) {
997 tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
999 tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1001 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
1004 tcg_gen_movi_i64(cpu_ir[rc], lit);
1006 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1008 tcg_gen_movi_i64(cpu_ir[rc], 0);
1014 gen_cmov(ctx, &gen_op_cmpltz, ra, rb, rc, islit, lit);
1018 gen_cmov(ctx, &gen_op_cmpgez, ra, rb, rc, islit, lit);
1022 if (likely(rc != 31)) {
1025 tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1026 else if (rb != 31) {
1027 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
1028 tcg_gen_not_i64(tmp, cpu_ir[rb]);
1029 tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], tmp);
1032 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
1035 tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1037 tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1039 tcg_gen_movi_i64(cpu_ir[rc], ~0);
1045 if (likely(rc != 31)) {
1047 tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
1049 tcg_gen_helper_1_1(helper_amask, cpu_ir[rc], cpu_ir[rb]);
1051 tcg_gen_movi_i64(cpu_ir[rc], 0);
1056 gen_cmov(ctx, &gen_op_cmplez, ra, rb, rc, islit, lit);
1060 gen_cmov(ctx, &gen_op_cmpgtz, ra, rb, rc, islit, lit);
1064 gen_op_load_implver();
1066 tcg_gen_mov_i64(cpu_ir[rc], cpu_T[0]);
1076 gen_arith3(ctx, &gen_op_mskbl, ra, rb, rc, islit, lit);
1080 gen_arith3(ctx, &gen_op_extbl, ra, rb, rc, islit, lit);
1084 gen_arith3(ctx, &gen_op_insbl, ra, rb, rc, islit, lit);
1088 gen_arith3(ctx, &gen_op_mskwl, ra, rb, rc, islit, lit);
1092 gen_arith3(ctx, &gen_op_extwl, ra, rb, rc, islit, lit);
1096 gen_arith3(ctx, &gen_op_inswl, ra, rb, rc, islit, lit);
1100 gen_arith3(ctx, &gen_op_mskll, ra, rb, rc, islit, lit);
1104 gen_arith3(ctx, &gen_op_extll, ra, rb, rc, islit, lit);
1108 gen_arith3(ctx, &gen_op_insll, ra, rb, rc, islit, lit);
1112 gen_arith3(ctx, &gen_op_zap, ra, rb, rc, islit, lit);
1116 gen_arith3(ctx, &gen_op_zapnot, ra, rb, rc, islit, lit);
1120 gen_arith3(ctx, &gen_op_mskql, ra, rb, rc, islit, lit);
1124 if (likely(rc != 31)) {
1127 tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1128 else if (rb != 31) {
1129 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1130 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1131 tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
1132 tcg_temp_free(shift);
1134 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
1136 tcg_gen_movi_i64(cpu_ir[rc], 0);
1141 gen_arith3(ctx, &gen_op_extql, ra, rb, rc, islit, lit);
1145 if (likely(rc != 31)) {
1148 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1149 else if (rb != 31) {
1150 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1151 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1152 tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
1153 tcg_temp_free(shift);
1155 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
1157 tcg_gen_movi_i64(cpu_ir[rc], 0);
1162 gen_arith3(ctx, &gen_op_insql, ra, rb, rc, islit, lit);
1166 if (likely(rc != 31)) {
1169 tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1170 else if (rb != 31) {
1171 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1172 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1173 tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
1174 tcg_temp_free(shift);
1176 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
1178 tcg_gen_movi_i64(cpu_ir[rc], 0);
1183 gen_arith3(ctx, &gen_op_mskwh, ra, rb, rc, islit, lit);
1187 gen_arith3(ctx, &gen_op_inswh, ra, rb, rc, islit, lit);
1191 gen_arith3(ctx, &gen_op_extwh, ra, rb, rc, islit, lit);
1195 gen_arith3(ctx, &gen_op_msklh, ra, rb, rc, islit, lit);
1199 gen_arith3(ctx, &gen_op_inslh, ra, rb, rc, islit, lit);
1203 gen_arith3(ctx, &gen_op_extlh, ra, rb, rc, islit, lit);
1207 gen_arith3(ctx, &gen_op_mskqh, ra, rb, rc, islit, lit);
1211 gen_arith3(ctx, &gen_op_insqh, ra, rb, rc, islit, lit);
1215 gen_arith3(ctx, &gen_op_extqh, ra, rb, rc, islit, lit);
1225 if (likely(rc != 31)) {
1226 if (ra == 31 || (rb == 31 && !islit))
1227 tcg_gen_movi_i64(cpu_ir[rc], 0);
1230 tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1232 tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1233 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1239 if (likely(rc != 31)) {
1240 if (ra == 31 || (rb == 31 && !islit))
1241 tcg_gen_movi_i64(cpu_ir[rc], 0);
1243 tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1245 tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1250 gen_arith3(ctx, &gen_op_umulh, ra, rb, rc, islit, lit);
1254 gen_arith3(ctx, &gen_op_mullv, ra, rb, rc, islit, lit);
1258 gen_arith3(ctx, &gen_op_mulqv, ra, rb, rc, islit, lit);
1265 switch (fpfn) { /* f11 & 0x3F */
1268 if (!(ctx->amask & AMASK_FIX))
1270 gen_itf(ctx, &gen_op_itofs, ra, rc);
1274 if (!(ctx->amask & AMASK_FIX))
1276 gen_farith2(ctx, &gen_op_sqrtf, rb, rc);
1280 if (!(ctx->amask & AMASK_FIX))
1282 gen_farith2(ctx, &gen_op_sqrts, rb, rc);
1286 if (!(ctx->amask & AMASK_FIX))
1289 gen_itf(ctx, &gen_op_itoff, ra, rc);
1296 if (!(ctx->amask & AMASK_FIX))
1298 gen_itf(ctx, &gen_op_itoft, ra, rc);
1302 if (!(ctx->amask & AMASK_FIX))
1304 gen_farith2(ctx, &gen_op_sqrtg, rb, rc);
1308 if (!(ctx->amask & AMASK_FIX))
1310 gen_farith2(ctx, &gen_op_sqrtt, rb, rc);
1317 /* VAX floating point */
1318 /* XXX: rounding mode and trap are ignored (!) */
1319 switch (fpfn) { /* f11 & 0x3F */
1322 gen_farith3(ctx, &gen_op_addf, ra, rb, rc);
1326 gen_farith3(ctx, &gen_op_subf, ra, rb, rc);
1330 gen_farith3(ctx, &gen_op_mulf, ra, rb, rc);
1334 gen_farith3(ctx, &gen_op_divf, ra, rb, rc);
1339 gen_farith2(ctx, &gen_op_cvtdg, rb, rc);
1346 gen_farith3(ctx, &gen_op_addg, ra, rb, rc);
1350 gen_farith3(ctx, &gen_op_subg, ra, rb, rc);
1354 gen_farith3(ctx, &gen_op_mulg, ra, rb, rc);
1358 gen_farith3(ctx, &gen_op_divg, ra, rb, rc);
1362 gen_farith3(ctx, &gen_op_cmpgeq, ra, rb, rc);
1366 gen_farith3(ctx, &gen_op_cmpglt, ra, rb, rc);
1370 gen_farith3(ctx, &gen_op_cmpgle, ra, rb, rc);
1374 gen_farith2(ctx, &gen_op_cvtgf, rb, rc);
1379 gen_farith2(ctx, &gen_op_cvtgd, rb, rc);
1386 gen_farith2(ctx, &gen_op_cvtgq, rb, rc);
1390 gen_farith2(ctx, &gen_op_cvtqf, rb, rc);
1394 gen_farith2(ctx, &gen_op_cvtqg, rb, rc);
1401 /* IEEE floating-point */
1402 /* XXX: rounding mode and traps are ignored (!) */
1403 switch (fpfn) { /* f11 & 0x3F */
1406 gen_farith3(ctx, &gen_op_adds, ra, rb, rc);
1410 gen_farith3(ctx, &gen_op_subs, ra, rb, rc);
1414 gen_farith3(ctx, &gen_op_muls, ra, rb, rc);
1418 gen_farith3(ctx, &gen_op_divs, ra, rb, rc);
1422 gen_farith3(ctx, &gen_op_addt, ra, rb, rc);
1426 gen_farith3(ctx, &gen_op_subt, ra, rb, rc);
1430 gen_farith3(ctx, &gen_op_mult, ra, rb, rc);
1434 gen_farith3(ctx, &gen_op_divt, ra, rb, rc);
1438 gen_farith3(ctx, &gen_op_cmptun, ra, rb, rc);
1442 gen_farith3(ctx, &gen_op_cmpteq, ra, rb, rc);
1446 gen_farith3(ctx, &gen_op_cmptlt, ra, rb, rc);
1450 gen_farith3(ctx, &gen_op_cmptle, ra, rb, rc);
1453 /* XXX: incorrect */
1454 if (fn11 == 0x2AC) {
1456 gen_farith2(ctx, &gen_op_cvtst, rb, rc);
1459 gen_farith2(ctx, &gen_op_cvtts, rb, rc);
1464 gen_farith2(ctx, &gen_op_cvttq, rb, rc);
1468 gen_farith2(ctx, &gen_op_cvtqs, rb, rc);
1472 gen_farith2(ctx, &gen_op_cvtqt, rb, rc);
1482 gen_farith2(ctx, &gen_op_cvtlq, rb, rc);
1487 if (ra == 31 && rc == 31) {
1492 gen_load_fir(ctx, rb, 0);
1493 gen_store_fir(ctx, rc, 0);
1496 gen_farith3(ctx, &gen_op_cpys, ra, rb, rc);
1501 gen_farith2(ctx, &gen_op_cpysn, rb, rc);
1505 gen_farith2(ctx, &gen_op_cpyse, rb, rc);
1509 gen_load_fir(ctx, ra, 0);
1510 gen_op_store_fpcr();
1515 gen_store_fir(ctx, ra, 0);
1519 gen_fcmov(ctx, &gen_op_cmpfeq, ra, rb, rc);
1523 gen_fcmov(ctx, &gen_op_cmpfne, ra, rb, rc);
1527 gen_fcmov(ctx, &gen_op_cmpflt, ra, rb, rc);
1531 gen_fcmov(ctx, &gen_op_cmpfge, ra, rb, rc);
1535 gen_fcmov(ctx, &gen_op_cmpfle, ra, rb, rc);
1539 gen_fcmov(ctx, &gen_op_cmpfgt, ra, rb, rc);
1543 gen_farith2(ctx, &gen_op_cvtql, rb, rc);
1547 gen_farith2(ctx, &gen_op_cvtqlv, rb, rc);
1551 gen_farith2(ctx, &gen_op_cvtqlsv, rb, rc);
1558 switch ((uint16_t)disp16) {
1561 /* No-op. Just exit from the current tb */
1566 /* No-op. Just exit from the current tb */
1589 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
1595 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
1600 /* XXX: TODO: evict tb cache at address rb */
1611 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
1623 /* HW_MFPR (PALcode) */
1624 #if defined (CONFIG_USER_ONLY)
1629 gen_op_mfpr(insn & 0xFF);
1631 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
1636 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1638 tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
1640 tcg_gen_movi_i64(cpu_pc, 0);
1641 /* Those four jumps only differ by the branch prediction hint */
1659 /* HW_LD (PALcode) */
1660 #if defined (CONFIG_USER_ONLY)
1666 tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
1668 tcg_gen_movi_i64(cpu_T[0], 0);
1669 tcg_gen_movi_i64(cpu_T[1], disp12);
1670 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
1671 switch ((insn >> 12) & 0xF) {
1673 /* Longword physical access */
1677 /* Quadword physical access */
1681 /* Longword physical access with lock */
1685 /* Quadword physical access with lock */
1689 /* Longword virtual PTE fetch */
1690 gen_op_ldl_kernel();
1693 /* Quadword virtual PTE fetch */
1694 gen_op_ldq_kernel();
1703 /* Longword virtual access */
1704 gen_op_ld_phys_to_virt();
1708 /* Quadword virtual access */
1709 gen_op_ld_phys_to_virt();
1713 /* Longword virtual access with protection check */
1717 /* Quadword virtual access with protection check */
1721 /* Longword virtual access with altenate access mode */
1722 gen_op_set_alt_mode();
1723 gen_op_ld_phys_to_virt();
1725 gen_op_restore_mode();
1728 /* Quadword virtual access with altenate access mode */
1729 gen_op_set_alt_mode();
1730 gen_op_ld_phys_to_virt();
1732 gen_op_restore_mode();
1735 /* Longword virtual access with alternate access mode and
1738 gen_op_set_alt_mode();
1740 gen_op_restore_mode();
1743 /* Quadword virtual access with alternate access mode and
1746 gen_op_set_alt_mode();
1748 gen_op_restore_mode();
1752 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
1759 if (!(ctx->amask & AMASK_BWX))
1761 if (likely(rc != 31)) {
1763 tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
1765 tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
1767 tcg_gen_movi_i64(cpu_ir[rc], 0);
1772 if (!(ctx->amask & AMASK_BWX))
1774 if (likely(rc != 31)) {
1776 tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
1778 tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
1780 tcg_gen_movi_i64(cpu_ir[rc], 0);
1785 if (!(ctx->amask & AMASK_CIX))
1787 if (likely(rc != 31)) {
1789 tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
1791 tcg_gen_helper_1_1(helper_ctpop, cpu_ir[rc], cpu_ir[rb]);
1793 tcg_gen_movi_i64(cpu_ir[rc], 0);
1798 if (!(ctx->amask & AMASK_MVI))
1805 if (!(ctx->amask & AMASK_CIX))
1807 if (likely(rc != 31)) {
1809 tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
1811 tcg_gen_helper_1_1(helper_ctlz, cpu_ir[rc], cpu_ir[rb]);
1813 tcg_gen_movi_i64(cpu_ir[rc], 0);
1818 if (!(ctx->amask & AMASK_CIX))
1820 if (likely(rc != 31)) {
1822 tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
1824 tcg_gen_helper_1_1(helper_cttz, cpu_ir[rc], cpu_ir[rb]);
1826 tcg_gen_movi_i64(cpu_ir[rc], 0);
1831 if (!(ctx->amask & AMASK_MVI))
1838 if (!(ctx->amask & AMASK_MVI))
1845 if (!(ctx->amask & AMASK_MVI))
1852 if (!(ctx->amask & AMASK_MVI))
1859 if (!(ctx->amask & AMASK_MVI))
1866 if (!(ctx->amask & AMASK_MVI))
1873 if (!(ctx->amask & AMASK_MVI))
1880 if (!(ctx->amask & AMASK_MVI))
1887 if (!(ctx->amask & AMASK_MVI))
1894 if (!(ctx->amask & AMASK_MVI))
1901 if (!(ctx->amask & AMASK_MVI))
1908 if (!(ctx->amask & AMASK_MVI))
1915 if (!(ctx->amask & AMASK_FIX))
1917 gen_fti(ctx, &gen_op_ftoit, ra, rb);
1921 if (!(ctx->amask & AMASK_FIX))
1923 gen_fti(ctx, &gen_op_ftois, ra, rb);
1930 /* HW_MTPR (PALcode) */
1931 #if defined (CONFIG_USER_ONLY)
1937 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
1939 tcg_gen_movi_i64(cpu_T[0], 0);
1940 gen_op_mtpr(insn & 0xFF);
1945 /* HW_REI (PALcode) */
1946 #if defined (CONFIG_USER_ONLY)
1956 tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
1958 tcg_gen_movi_i64(cpu_T[0], 0);
1959 tcg_gen_movi_i64(cpu_T[1], (((int64_t)insn << 51) >> 51));
1960 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
1967 /* HW_ST (PALcode) */
1968 #if defined (CONFIG_USER_ONLY)
1974 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp12);
1976 tcg_gen_movi_i64(cpu_T[0], disp12);
1978 tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
1980 tcg_gen_movi_i64(cpu_T[1], 0);
1981 switch ((insn >> 12) & 0xF) {
1983 /* Longword physical access */
1987 /* Quadword physical access */
1991 /* Longword physical access with lock */
1995 /* Quadword physical access with lock */
1999 /* Longword virtual access */
2000 gen_op_st_phys_to_virt();
2004 /* Quadword virtual access */
2005 gen_op_st_phys_to_virt();
2027 /* Longword virtual access with alternate access mode */
2028 gen_op_set_alt_mode();
2029 gen_op_st_phys_to_virt();
2031 gen_op_restore_mode();
2034 /* Quadword virtual access with alternate access mode */
2035 gen_op_set_alt_mode();
2036 gen_op_st_phys_to_virt();
2038 gen_op_restore_mode();
2053 gen_load_fmem(ctx, &gen_ldf, ra, rb, disp16);
2061 gen_load_fmem(ctx, &gen_ldg, ra, rb, disp16);
2068 gen_load_fmem(ctx, &gen_lds, ra, rb, disp16);
2072 gen_load_fmem(ctx, &gen_ldt, ra, rb, disp16);
2077 gen_store_fmem(ctx, &gen_stf, ra, rb, disp16);
2085 gen_store_fmem(ctx, &gen_stg, ra, rb, disp16);
2092 gen_store_fmem(ctx, &gen_sts, ra, rb, disp16);
2096 gen_store_fmem(ctx, &gen_stt, ra, rb, disp16);
2100 gen_load_mem(ctx, &gen_ldl, ra, rb, disp16, 0);
2104 gen_load_mem(ctx, &gen_ldq, ra, rb, disp16, 0);
2108 gen_load_mem(ctx, &gen_ldl_l, ra, rb, disp16, 0);
2112 gen_load_mem(ctx, &gen_ldq_l, ra, rb, disp16, 0);
2116 gen_store_mem(ctx, &gen_stl, ra, rb, disp16, 0);
2120 gen_store_mem(ctx, &gen_stq, ra, rb, disp16, 0);
2124 gen_store_mem(ctx, &gen_stl_c, ra, rb, disp16, 0);
2128 gen_store_mem(ctx, &gen_stq_c, ra, rb, disp16, 0);
2133 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2134 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2139 gen_fbcond(ctx, &gen_op_cmpfeq, ra, disp16);
2144 gen_fbcond(ctx, &gen_op_cmpflt, ra, disp16);
2149 gen_fbcond(ctx, &gen_op_cmpfle, ra, disp16);
2155 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2156 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2161 gen_fbcond(ctx, &gen_op_cmpfne, ra, disp16);
2166 gen_fbcond(ctx, &gen_op_cmpfge, ra, disp16);
2171 gen_fbcond(ctx, &gen_op_cmpfgt, ra, disp16);
2176 gen_bcond(ctx, &gen_op_cmplbc, ra, disp16);
2181 gen_bcond(ctx, &gen_op_cmpeqz, ra, disp16);
2186 gen_bcond(ctx, &gen_op_cmpltz, ra, disp16);
2191 gen_bcond(ctx, &gen_op_cmplez, ra, disp16);
2196 gen_bcond(ctx, &gen_op_cmplbs, ra, disp16);
2201 gen_bcond(ctx, &gen_op_cmpnez, ra, disp16);
2206 gen_bcond(ctx, &gen_op_cmpgez, ra, disp16);
2211 gen_bcond(ctx, &gen_op_cmpgtz, ra, disp16);
/*
 * Core translation loop: decode guest Alpha instructions one by one via
 * translate_one() and emit the corresponding TCG ops for one
 * TranslationBlock.
 *
 * NOTE(review): this extract elides many original lines and each line
 * carries its original file line number as a prefix; the annotations
 * below describe only what the visible statements establish.
 *
 * env: CPU state being translated for; tb: the TB being filled in.
 * The third parameter (not visible here) is presumably the usual
 * search_pc flag -- gen_intermediate_code() passes 0 and
 * gen_intermediate_code_pc() passes 1 -- TODO confirm against the
 * full source.
 */
2223 static always_inline void gen_intermediate_code_internal (CPUState *env,
2224 TranslationBlock *tb,
2227 #if defined ALPHA_DEBUG_DISAS
2228 static int insn_count;
2230 DisasContext ctx, *ctxp = &ctx;
2231 target_ulong pc_start;
2233 uint16_t *gen_opc_end;
/* Stop emitting ops before the static opcode buffer overflows. */
2240 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
/* Seed the per-TB disassembly context from CPU state: architecture
 * amask, memory index and PAL-mode flag (non-user builds take the
 * ipr-based values below). */
2242 ctx.amask = env->amask;
2243 #if defined (CONFIG_USER_ONLY)
2246 ctx.mem_idx = ((env->ps >> 3) & 3);
2247 ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
/* Instruction budget for this TB; 0 in cflags means "no limit", which
 * is represented by CF_COUNT_MASK. */
2250 max_insns = tb->cflags & CF_COUNT_MASK;
2252 max_insns = CF_COUNT_MASK;
/* translate_one() returns 0 to continue; any non-zero value ends the
 * TB (specific codes handled after the loop). */
2255 for (ret = 0; ret == 0;) {
/* If a debugger breakpoint sits on the next insn, raise EXCP_DEBUG
 * instead of translating it. */
2256 if (env->nb_breakpoints > 0) {
2257 for(j = 0; j < env->nb_breakpoints; j++) {
2258 if (env->breakpoints[j] == ctx.pc) {
2259 gen_excp(&ctx, EXCP_DEBUG, 0);
/* search_pc bookkeeping: record, per emitted op, the guest PC and
 * icount so cpu_restore_state can map host PC back to guest PC.
 * Slots skipped since the last insn are marked as non-starts. */
2265 j = gen_opc_ptr - gen_opc_buf;
2269 gen_opc_instr_start[lj++] = 0;
2270 gen_opc_pc[lj] = ctx.pc;
2271 gen_opc_instr_start[lj] = 1;
2272 gen_opc_icount[lj] = num_insns;
/* For icount: the last insn of an I/O TB must run with io started. */
2275 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
2277 #if defined ALPHA_DEBUG_DISAS
2279 if (logfile != NULL) {
2280 fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
2281 ctx.pc, ctx.mem_idx);
/* Fetch the 32-bit instruction word at the current guest PC. */
2284 insn = ldl_code(ctx.pc);
2285 #if defined ALPHA_DEBUG_DISAS
2287 if (logfile != NULL) {
2288 fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
/* Decode and emit TCG ops for one instruction. */
2293 ret = translate_one(ctxp, insn);
2296 /* if we reach a page boundary or are single stepping, stop
/* End the TB at a guest page boundary, when single-stepping, or when
 * the per-TB instruction budget is exhausted. */
2299 if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
2300 (env->singlestep_enabled) ||
2301 num_insns >= max_insns) {
2304 #if defined (DO_SINGLE_STEP)
/* Unless translate_one already updated the PC (return codes 1 and 3
 * -- presumably branch/exception exits; TODO confirm against
 * translate_one), store the fall-through PC before leaving the TB. */
2308 if (ret != 1 && ret != 3) {
2309 tcg_gen_movi_i64(cpu_pc, ctx.pc);
2311 #if defined (DO_TB_FLUSH)
2312 tcg_gen_helper_0_0(helper_tb_flush);
2314 if (tb->cflags & CF_LAST_IO)
2316 /* Generate the return instruction */
/* Close the op stream: finalize icount and terminate the buffer. */
2318 gen_icount_end(tb, num_insns);
2319 *gen_opc_ptr = INDEX_op_end;
/* search_pc epilogue: pad the instr_start table out to the last op. */
2321 j = gen_opc_ptr - gen_opc_buf;
2324 gen_opc_instr_start[lj++] = 0;
/* Record how many guest bytes and instructions this TB covers. */
2326 tb->size = ctx.pc - pc_start;
2327 tb->icount = num_insns;
2329 #if defined ALPHA_DEBUG_DISAS
2330 if (loglevel & CPU_LOG_TB_CPU) {
2331 cpu_dump_state(env, logfile, fprintf, 0);
/* Optionally log the guest assembly that was just translated. */
2333 if (loglevel & CPU_LOG_TB_IN_ASM) {
2334 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
2335 target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
2336 fprintf(logfile, "\n");
/* Public entry point: translate a TB for execution. Passes 0 as the
 * final argument where gen_intermediate_code_pc() passes 1 --
 * presumably the search_pc flag controlling per-op PC bookkeeping;
 * TODO confirm against the full definition. */
2341 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
2343 gen_intermediate_code_internal(env, tb, 0);
/* Public entry point used when restoring CPU state: same translation
 * loop, but with the final argument set to 1 so the per-instruction
 * PC tables (gen_opc_pc et al.) are recorded. */
2346 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
2348 gen_intermediate_code_internal(env, tb, 1);
/*
 * Allocate and initialize a CPUAlphaState.
 *
 * cpu_model is accepted but, as visible here, not consulted -- the
 * implementation version is hardcoded to IMPLVER_2106x (see the XXX
 * below). Returns the freshly zero-allocated state; the error path
 * (qemu_mallocz failure check) is not visible in this extract.
 *
 * NOTE(review): several original lines (declarations of env/hwpcb,
 * the #else branch of CONFIG_USER_ONLY, the return) are elided by the
 * extraction.
 */
2351 CPUAlphaState * cpu_alpha_init (const char *cpu_model)
/* Zero-allocated, so every field not set below starts at 0. */
2356 env = qemu_mallocz(sizeof(CPUAlphaState));
/* One-time registration of TCG globals (guarded inside by done_init). */
2360 alpha_translate_init();
2362 /* XXX: should not be hardcoded */
2363 env->implver = IMPLVER_2106x;
2365 #if defined (CONFIG_USER_ONLY)
2369 /* Initialize IPR */
/* hwpcb: base of the hardware PCB; the ldq_raw reads of its fields
 * below are commented out, so the stack pointers and PTBR are left
 * uninitialized here -- presumably filled in by firmware/loader;
 * TODO confirm. */
2370 hwpcb = env->ipr[IPR_PCBB];
2371 env->ipr[IPR_ASN] = 0;
2372 env->ipr[IPR_ASTEN] = 0;
2373 env->ipr[IPR_ASTSR] = 0;
2374 env->ipr[IPR_DATFX] = 0;
2376 // env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
2377 // env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
2378 // env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
2379 // env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
2380 env->ipr[IPR_FEN] = 0;
/* Start with all interrupts masked (IPL 31 is the highest level). */
2381 env->ipr[IPR_IPL] = 31;
2382 env->ipr[IPR_MCES] = 0;
2383 env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
2384 // env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
2385 env->ipr[IPR_SISR] = 0;
2386 env->ipr[IPR_VIRBND] = -1ULL;
/* Restore the guest PC after an exception inside a TB: pc_pos indexes
 * the gen_opc_pc[] table that gen_intermediate_code_internal filled in
 * during a search_pc translation. tb, searched_pc and puc are unused
 * in the visible body. */
2391 void gen_pc_load(CPUState *env, TranslationBlock *tb,
2392 unsigned long searched_pc, int pc_pos, void *puc)
2394 env->pc = gen_opc_pc[pc_pos];