2 * Alpha emulation cpu translation for qemu.
4 * Copyright (c) 2007 Jocelyn Mayer
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
28 #include "host-utils.h"
31 #include "qemu-common.h"
33 #define DO_SINGLE_STEP
34 #define ALPHA_DEBUG_DISAS
/* Per-translation-block decoder state; definition appears elsewhere in the file. */
37 typedef struct DisasContext DisasContext;
41 #if !defined (CONFIG_USER_ONLY)
47 /* global register indexes */
/* TCG globals mapping the 31 Alpha integer and 31 floating-point registers
   (register 31 is the hardwired zero register and has no TCG global). */
49 static TCGv cpu_ir[31];
50 static TCGv cpu_fir[31];
53 /* dyngen register indexes */
/* Backing storage for the register names handed to tcg_global_mem_new():
   "ir0".."ir30" (10 of 4 bytes + 21 of 5) and "fir0".."fir30" (10 of 5 + 21 of 6),
   each including the NUL terminator. */
57 static char cpu_reg_names[10*4+21*5 + 10*5+21*6];
59 #include "gen-icount.h"
/* One-time TCG setup: register the env pointer, the T0/T1 scratch registers,
   the ir/fir register file and pc as TCG globals, and register all helpers. */
61 static void alpha_translate_init(void)
/* Guard so repeated calls are no-ops after the first initialization. */
65 static int done_init = 0;
70 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
/* When the 64-bit target word does not fit a host register, keep T0/T1 in
   CPUState memory; otherwise pin them to host registers AREG1/AREG2. */
72 #if TARGET_LONG_BITS > HOST_LONG_BITS
73 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
74 offsetof(CPUState, t0), "T0");
75 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
76 offsetof(CPUState, t1), "T1");
78 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG1, "T0");
79 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG2, "T1");
/* Build the 31 integer and 31 FP register globals; p walks cpu_reg_names,
   advancing by the length of each name including its NUL. */
83 for (i = 0; i < 31; i++) {
84 sprintf(p, "ir%d", i);
85 cpu_ir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
86 offsetof(CPUState, ir[i]), p);
87 p += (i < 10) ? 4 : 5;
89 sprintf(p, "fir%d", i);
90 cpu_fir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
91 offsetof(CPUState, fir[i]), p);
92 p += (i < 10) ? 5 : 6;
95 cpu_pc = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
96 offsetof(CPUState, pc), "pc");
98 /* register helpers */
/* Redefine DEF_HELPER so that including the helper list below registers
   every helper function with TCG by name. */
100 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
/* Dyngen load/store dispatch tables, indexed by ctx->mem_idx.
   User-mode builds have a single "raw" access mode; system builds have one
   entry per Alpha privilege level (kernel/executive/supervisor/user). */
107 #if defined(CONFIG_USER_ONLY)
108 #define OP_LD_TABLE(width) \
109 static GenOpFunc *gen_op_ld##width[] = { \
110 &gen_op_ld##width##_raw, \
112 #define OP_ST_TABLE(width) \
113 static GenOpFunc *gen_op_st##width[] = { \
114 &gen_op_st##width##_raw, \
117 #define OP_LD_TABLE(width) \
118 static GenOpFunc *gen_op_ld##width[] = { \
119 &gen_op_ld##width##_kernel, \
120 &gen_op_ld##width##_executive, \
121 &gen_op_ld##width##_supervisor, \
122 &gen_op_ld##width##_user, \
124 #define OP_ST_TABLE(width) \
125 static GenOpFunc *gen_op_st##width[] = { \
126 &gen_op_st##width##_kernel, \
127 &gen_op_st##width##_executive, \
128 &gen_op_st##width##_supervisor, \
129 &gen_op_st##width##_user, \
/* GEN_LD/GEN_ST instantiate a table plus a small wrapper that dispatches
   through it using the current memory-access privilege index. */
133 #define GEN_LD(width) \
134 OP_LD_TABLE(width); \
135 static always_inline void gen_ld##width (DisasContext *ctx) \
137 (*gen_op_ld##width[ctx->mem_idx])(); \
140 #define GEN_ST(width) \
141 OP_ST_TABLE(width); \
142 static always_inline void gen_st##width (DisasContext *ctx) \
144 (*gen_op_st##width[ctx->mem_idx])(); \
/* Raise a guest exception: record the current PC in cpu_pc, then call the
   helper_excp helper with the exception number and error code. */
156 static always_inline void gen_excp (DisasContext *ctx,
157 int exception, int error_code)
161 tcg_gen_movi_i64(cpu_pc, ctx->pc);
162 tmp1 = tcg_const_i32(exception);
163 tmp2 = tcg_const_i32(error_code);
164 tcg_gen_helper_0_2(helper_excp, tmp1, tmp2);
/* Raise the illegal-opcode (OPCDEC) exception for an invalid instruction. */
169 static always_inline void gen_invalid (DisasContext *ctx)
171 gen_excp(ctx, EXCP_OPCDEC, 0);
/* Legacy dyngen load path: compute the effective address rb+disp16 into T0
   (masking the low 3 bits for aligned quadword variants), invoke the given
   load op, and move the result from T1 into ra.  Writes to ra are skipped
   when ra is the zero register with no displacement. */
174 static always_inline void gen_load_mem_dyngen (DisasContext *ctx,
175 void (*gen_load_op)(DisasContext *ctx),
176 int ra, int rb, int32_t disp16,
179 if (ra != 31 || disp16 != 0) {
181 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
183 tcg_gen_movi_i64(cpu_T[0], disp16);
/* Clear the low 3 bits to force 8-byte alignment of the access address. */
185 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
188 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
/* Load a 32-bit VAX F-float from memory at t1 and convert its memory
   representation to register format into t0. */
192 static always_inline void gen_qemu_ldf (TCGv t0, TCGv t1, int flags)
194 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
195 tcg_gen_qemu_ld32u(tmp, t1, flags);
196 tcg_gen_helper_1_1(helper_memory_to_f, t0, tmp);
/* Load a 64-bit VAX G-float from memory at t1 and convert its memory
   representation to register format into t0. */
200 static always_inline void gen_qemu_ldg (TCGv t0, TCGv t1, int flags)
202 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
203 tcg_gen_qemu_ld64(tmp, t1, flags);
204 tcg_gen_helper_1_1(helper_memory_to_g, t0, tmp);
/* Load a 32-bit IEEE S-float from memory at t1 and convert its memory
   representation to register format into t0. */
208 static always_inline void gen_qemu_lds (TCGv t0, TCGv t1, int flags)
210 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
211 tcg_gen_qemu_ld32u(tmp, t1, flags);
212 tcg_gen_helper_1_1(helper_memory_to_s, t0, tmp);
/* TCG load path: loads into fir[ra] (fp flag) or ir[ra] via the supplied
   qemu-load generator.  Effective address is rb+disp16, optionally masked to
   8-byte alignment.  A load into register 31 is a no-op (zero register). */
216 static always_inline void gen_load_mem (DisasContext *ctx,
217 void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1, int flags),
218 int ra, int rb, int32_t disp16,
/* Register 31 reads as zero; the load has no architectural effect. */
223 if (unlikely(ra == 31))
226 addr = tcg_temp_new(TCG_TYPE_I64);
228 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
/* Force 8-byte alignment for the quadword-aligned access forms. */
230 tcg_gen_andi_i64(addr, addr, ~0x7);
234 tcg_gen_movi_i64(addr, disp16);
237 tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
239 tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
/* Legacy dyngen store path: effective address rb+disp16 into T0 (optionally
   8-byte aligned), store value from ra (or zero for register 31) via T1,
   then invoke the supplied store op. */
243 static always_inline void gen_store_mem_dyngen (DisasContext *ctx,
244 void (*gen_store_op)(DisasContext *ctx),
245 int ra, int rb, int32_t disp16,
249 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
251 tcg_gen_movi_i64(cpu_T[0], disp16);
253 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
255 tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
/* Register 31 always reads as zero. */
257 tcg_gen_movi_i64(cpu_T[1], 0);
258 (*gen_store_op)(ctx);
/* Convert a register-format VAX F-float in t0 to its 32-bit memory
   representation and store it at address t1. */
261 static always_inline void gen_qemu_stf (TCGv t0, TCGv t1, int flags)
263 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
264 tcg_gen_helper_1_1(helper_f_to_memory, tmp, t0);
265 tcg_gen_qemu_st32(tmp, t1, flags);
/* Convert a register-format VAX G-float in t0 to its 64-bit memory
   representation and store it at address t1. */
269 static always_inline void gen_qemu_stg (TCGv t0, TCGv t1, int flags)
271 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
272 tcg_gen_helper_1_1(helper_g_to_memory, tmp, t0);
273 tcg_gen_qemu_st64(tmp, t1, flags);
/* Convert a register-format IEEE S-float in t0 to its 32-bit memory
   representation and store it at address t1. */
277 static always_inline void gen_qemu_sts (TCGv t0, TCGv t1, int flags)
279 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
280 tcg_gen_helper_1_1(helper_s_to_memory, tmp, t0);
281 tcg_gen_qemu_st32(tmp, t1, flags);
/* TCG store path: stores fir[ra] (fp flag) or ir[ra] at address rb+disp16
   (optionally masked to 8-byte alignment) via the supplied qemu-store
   generator.  Register 31 stores a constant zero. */
285 static always_inline void gen_store_mem (DisasContext *ctx,
286 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1, int flags),
287 int ra, int rb, int32_t disp16,
290 TCGv addr = tcg_temp_new(TCG_TYPE_I64);
292 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
294 tcg_gen_andi_i64(addr, addr, ~0x7);
298 tcg_gen_movi_i64(addr, disp16);
302 tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
304 tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
/* ra == 31: the zero register, so store an explicit zero constant. */
306 TCGv zero = tcg_const_i64(0);
307 tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
/* Conditional branch on an integer register: compare ra (optionally masked
   to its low bit for the BLBC/BLBS forms) against zero with `cond`; on the
   taken path set pc to pc + 4*disp16 (disp is in longwords), otherwise fall
   through to the next sequential pc. */
313 static always_inline void gen_bcond (DisasContext *ctx,
315 int ra, int32_t disp16, int mask)
319 l1 = gen_new_label();
320 l2 = gen_new_label();
321 if (likely(ra != 31)) {
/* mask != 0: low-bit branches (BLBC/BLBS) test only bit 0 of ra. */
323 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
324 tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
325 tcg_gen_brcondi_i64(cond, tmp, 0, l1);
328 tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
330 /* Very uncommon case - Do not bother to optimize. */
331 TCGv tmp = tcg_const_i64(0);
332 tcg_gen_brcondi_i64(cond, tmp, 0, l1);
/* Not-taken path: continue at the next instruction. */
335 tcg_gen_movi_i64(cpu_pc, ctx->pc);
/* Taken path: displacement is in units of 4-byte instructions. */
338 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
/* Floating-point conditional branch: call the comparison helper `func` on
   fir[ra] (or a zero constant when ra is 31) and branch when the helper's
   result is non-zero; target is pc + 4*disp16. */
342 static always_inline void gen_fbcond (DisasContext *ctx,
344 int ra, int32_t disp16)
349 l1 = gen_new_label();
350 l2 = gen_new_label();
352 tmp = tcg_temp_new(TCG_TYPE_I64);
353 tcg_gen_helper_1_1(func, tmp, cpu_fir[ra]);
/* ra == 31: evaluate the condition against a zero operand. */
355 tmp = tcg_const_i64(0);
356 tcg_gen_helper_1_1(func, tmp, tmp);
358 tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
359 tcg_gen_movi_i64(cpu_pc, ctx->pc);
362 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
/* Integer conditional move: if the INVERSE condition on ra holds, skip the
   move; otherwise copy rb (or the literal) into rc.  `mask` selects the
   low-bit test used by CMOVLBC/CMOVLBS. */
366 static always_inline void gen_cmov (TCGCond inv_cond,
367 int ra, int rb, int rc,
368 int islit, uint8_t lit, int mask)
/* rc == 31: destination is the zero register, nothing to do. */
372 if (unlikely(rc == 31))
375 l1 = gen_new_label();
379 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
380 tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
381 tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
384 tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
386 /* Very uncommon case - Do not bother to optimize. */
387 TCGv tmp = tcg_const_i64(0);
388 tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
393 tcg_gen_movi_i64(cpu_ir[rc], lit);
395 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
/* Two-operand FP helper call: fir[rc] = helper(fir[rb]), using a zero
   constant when rb is register 31; a no-op when rc is 31. */
399 static always_inline void gen_farith2 (void *helper,
402 if (unlikely(rc == 31))
406 tcg_gen_helper_1_1(helper, cpu_fir[rc], cpu_fir[rb]);
408 TCGv tmp = tcg_const_i64(0);
409 tcg_gen_helper_1_1(helper, cpu_fir[rc], tmp);
/* Three-operand FP helper call: fir[rc] = helper(fir[ra], fir[rb]), with a
   zero constant substituted for either source that is register 31; a no-op
   when rc is 31. */
414 static always_inline void gen_farith3 (void *helper,
415 int ra, int rb, int rc)
417 if (unlikely(rc == 31))
422 tcg_gen_helper_1_2(helper, cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);
424 TCGv tmp = tcg_const_i64(0);
425 tcg_gen_helper_1_2(helper, cpu_fir[rc], cpu_fir[ra], tmp);
/* ra == 31: first source is zero; rb may also be 31 (both zero). */
429 TCGv tmp = tcg_const_i64(0);
431 tcg_gen_helper_1_2(helper, cpu_fir[rc], tmp, cpu_fir[rb]);
433 tcg_gen_helper_1_2(helper, cpu_fir[rc], tmp, tmp);
/* Floating-point conditional move: evaluate the comparison helper `func` on
   fir[ra] (zero when ra is 31); when the result is non-zero, copy fir[ra]
   (or zero) into fir[rc].  No-op when rc is 31. */
438 static always_inline void gen_fcmov (void *func,
439 int ra, int rb, int rc)
444 if (unlikely(rc == 31))
447 l1 = gen_new_label();
448 tmp = tcg_temp_new(TCG_TYPE_I64);
450 tmp = tcg_temp_new(TCG_TYPE_I64);
451 tcg_gen_helper_1_1(func, tmp, cpu_fir[ra]);
453 tmp = tcg_const_i64(0);
454 tcg_gen_helper_1_1(func, tmp, tmp);
/* Condition false (helper returned zero): skip the move. */
456 tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);
458 tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
460 tcg_gen_movi_i64(cpu_fir[rc], 0);
464 /* EXTWH, EXTLH, EXTQH */
/* High-part byte extract: shift ra left by 64 - 8*(rb&7) (literal or
   register byte offset), then apply the width-specific zero extension.
   rc == 31 is a no-op; ra == 31 writes zero. */
465 static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
466 int ra, int rb, int rc,
467 int islit, uint8_t lit)
469 if (unlikely(rc == 31))
/* NOTE(review): when (lit & 7) == 0 this shifts by 64, which is undefined
   for tcg_gen_shli_i64 — confirm a zero-offset guard exists upstream. */
475 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
477 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
480 tmp1 = tcg_temp_new(TCG_TYPE_I64);
481 tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
482 tcg_gen_shli_i64(tmp1, tmp1, 3);
483 tmp2 = tcg_const_i64(64);
484 tcg_gen_sub_i64(tmp1, tmp2, tmp1);
486 tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
/* NULL extension function means EXTQH: the full quadword, no narrowing. */
490 tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
492 tcg_gen_movi_i64(cpu_ir[rc], 0);
495 /* EXTBL, EXTWL, EXTLL, EXTQL */
/* Low-part byte extract: shift ra right by 8*(rb&7) (literal or register
   byte offset), then apply the width-specific zero extension.  rc == 31 is
   a no-op; ra == 31 writes zero. */
496 static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
497 int ra, int rb, int rc,
498 int islit, uint8_t lit)
500 if (unlikely(rc == 31))
505 tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
507 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
508 tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
509 tcg_gen_shli_i64(tmp, tmp, 3);
510 tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
/* NULL extension function means EXTQL: the full quadword, no narrowing. */
514 tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
516 tcg_gen_movi_i64(cpu_ir[rc], 0);
519 /* Code to call arith3 helpers */
/* Three-operand integer helper call: ir[rc] = helper(ir[ra], rb-or-literal),
   substituting a zero constant for each source that is register 31; a no-op
   when rc is 31. */
520 static always_inline void gen_arith3 (void *helper,
521 int ra, int rb, int rc,
522 int islit, uint8_t lit)
524 if (unlikely(rc == 31))
529 TCGv tmp = tcg_const_i64(lit);
530 tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], tmp);
533 tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
/* ra == 31: first operand is zero. */
535 TCGv tmp1 = tcg_const_i64(0);
537 TCGv tmp2 = tcg_const_i64(lit);
538 tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, tmp2);
541 tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, cpu_ir[rb]);
/* Integer compare: ir[rc] = (ra <cond> rb-or-literal) ? 1 : 0, using a
   branch over two constant stores.  ra == 31 compares zero; rc == 31 is a
   no-op. */
546 static always_inline void gen_cmp(TCGCond cond,
547 int ra, int rb, int rc,
548 int islit, uint8_t lit)
553 if (unlikely(rc == 31))
556 l1 = gen_new_label();
557 l2 = gen_new_label();
560 tmp = tcg_temp_new(TCG_TYPE_I64);
561 tcg_gen_mov_i64(tmp, cpu_ir[ra]);
563 tmp = tcg_const_i64(0);
565 tcg_gen_brcondi_i64(cond, tmp, lit, l1);
567 tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);
/* Condition false: result 0; taken branch lands past this to store 1. */
569 tcg_gen_movi_i64(cpu_ir[rc], 0);
572 tcg_gen_movi_i64(cpu_ir[rc], 1);
/* Decode one 32-bit Alpha instruction and emit the TCG/dyngen ops that
   implement it.  Returns a status code consumed by the translation loop.
   Register 31 is the zero register throughout: reads yield 0 and writes
   are discarded, which is why nearly every case special-cases ra/rb/rc==31. */
576 static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
579 int32_t disp21, disp16, disp12;
581 uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
585 /* Decode all instruction fields */
587 ra = (insn >> 21) & 0x1F;
588 rb = (insn >> 16) & 0x1F;
590 sbz = (insn >> 13) & 0x07;
/* Bit 12 selects the 8-bit literal form of operate instructions. */
591 islit = (insn >> 12) & 1;
592 if (rb == 31 && !islit) {
596 lit = (insn >> 13) & 0xFF;
597 palcode = insn & 0x03FFFFFF;
/* Sign-extend the 21-bit branch and 12-bit memory displacements. */
598 disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
599 disp16 = (int16_t)(insn & 0x0000FFFF);
600 disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
601 fn16 = insn & 0x0000FFFF;
602 fn11 = (insn >> 5) & 0x000007FF;
604 fn7 = (insn >> 5) & 0x0000007F;
605 fn2 = (insn >> 5) & 0x00000003;
607 #if defined ALPHA_DEBUG_DISAS
608 if (logfile != NULL) {
609 fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
610 opc, ra, rb, rc, disp16);
/* CALL_PAL: dispatch to the PALcode entry derived from the call number. */
616 if (palcode >= 0x80 && palcode < 0xC0) {
617 /* Unprivileged PAL call */
618 gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
619 #if !defined (CONFIG_USER_ONLY)
620 } else if (palcode < 0x40) {
621 /* Privileged PAL code */
/* mem_idx bit 0 set means user mode: privileged calls are rejected. */
622 if (ctx->mem_idx & 1)
625 gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
628 /* Invalid PAL call */
/* LDA: ra = rb + disp16 (no memory access). */
656 if (likely(ra != 31)) {
658 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
660 tcg_gen_movi_i64(cpu_ir[ra], disp16);
/* LDAH: ra = rb + (disp16 << 16). */
665 if (likely(ra != 31)) {
667 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
669 tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
/* Byte/word loads require the BWX extension. */
674 if (!(ctx->amask & AMASK_BWX))
676 gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
680 gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
684 if (!(ctx->amask & AMASK_BWX))
686 gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 1);
690 gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0);
694 gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0);
698 gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1);
/* ADDL: 32-bit add, result sign-extended to 64 bits. */
704 if (likely(rc != 31)) {
707 tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
708 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
710 tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
711 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
715 tcg_gen_movi_i64(cpu_ir[rc], lit);
717 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
/* S4ADDL: (ra << 2) + rb, sign-extended to 32 bits. */
723 if (likely(rc != 31)) {
725 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
726 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
728 tcg_gen_addi_i64(tmp, tmp, lit);
730 tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
731 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
735 tcg_gen_movi_i64(cpu_ir[rc], lit);
737 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
/* SUBL: 32-bit subtract, result sign-extended. */
743 if (likely(rc != 31)) {
746 tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
748 tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
749 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
752 tcg_gen_movi_i64(cpu_ir[rc], -lit);
754 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
755 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
/* S4SUBL: (ra << 2) - rb, sign-extended. */
761 if (likely(rc != 31)) {
763 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
764 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
766 tcg_gen_subi_i64(tmp, tmp, lit);
768 tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
769 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
773 tcg_gen_movi_i64(cpu_ir[rc], -lit);
775 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
776 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
783 gen_arith3(helper_cmpbge, ra, rb, rc, islit, lit);
/* S8ADDL: (ra << 3) + rb, sign-extended. */
787 if (likely(rc != 31)) {
789 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
790 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
792 tcg_gen_addi_i64(tmp, tmp, lit);
794 tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
795 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
799 tcg_gen_movi_i64(cpu_ir[rc], lit);
801 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
/* S8SUBL: (ra << 3) - rb, sign-extended. */
807 if (likely(rc != 31)) {
809 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
810 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
812 tcg_gen_subi_i64(tmp, tmp, lit);
814 tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
815 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
819 tcg_gen_movi_i64(cpu_ir[rc], -lit);
821 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
822 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
829 gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
/* ADDQ: full 64-bit add. */
833 if (likely(rc != 31)) {
836 tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
838 tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
841 tcg_gen_movi_i64(cpu_ir[rc], lit);
843 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
/* S4ADDQ: (ra << 2) + rb, 64-bit. */
849 if (likely(rc != 31)) {
851 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
852 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
854 tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
856 tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
860 tcg_gen_movi_i64(cpu_ir[rc], lit);
862 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
/* SUBQ: full 64-bit subtract. */
868 if (likely(rc != 31)) {
871 tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
873 tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
876 tcg_gen_movi_i64(cpu_ir[rc], -lit);
878 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
/* S4SUBQ: (ra << 2) - rb, 64-bit. */
884 if (likely(rc != 31)) {
886 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
887 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
889 tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
891 tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
895 tcg_gen_movi_i64(cpu_ir[rc], -lit);
897 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
903 gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
/* S8ADDQ: (ra << 3) + rb, 64-bit. */
907 if (likely(rc != 31)) {
909 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
910 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
912 tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
914 tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
918 tcg_gen_movi_i64(cpu_ir[rc], lit);
920 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
/* S8SUBQ: (ra << 3) - rb, 64-bit. */
926 if (likely(rc != 31)) {
928 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
929 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
931 tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
933 tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
937 tcg_gen_movi_i64(cpu_ir[rc], -lit);
939 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
945 gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
/* Overflow-trapping add/sub forms go through helpers. */
949 gen_arith3(helper_addlv, ra, rb, rc, islit, lit);
953 gen_arith3(helper_sublv, ra, rb, rc, islit, lit);
957 gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
961 gen_arith3(helper_addqv, ra, rb, rc, islit, lit);
965 gen_arith3(helper_subqv, ra, rb, rc, islit, lit);
969 gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
/* AND: logical and with register or literal. */
979 if (likely(rc != 31)) {
981 tcg_gen_movi_i64(cpu_ir[rc], 0);
983 tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
985 tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
/* BIC: and-not. */
990 if (likely(rc != 31)) {
993 tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
995 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
996 tcg_gen_not_i64(tmp, cpu_ir[rb]);
997 tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], tmp);
1001 tcg_gen_movi_i64(cpu_ir[rc], 0);
/* CMOVLBS/CMOVLBC: conditional move on the low bit of ra. */
1006 gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
1010 gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
/* BIS: logical or (MOV when ra is 31). */
1014 if (likely(rc != 31)) {
1017 tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1019 tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1022 tcg_gen_movi_i64(cpu_ir[rc], lit);
1024 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1030 gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
1034 gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
/* ORNOT: or with complemented second operand. */
1038 if (likely(rc != 31)) {
1041 tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1043 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
1044 tcg_gen_not_i64(tmp, cpu_ir[rb]);
1045 tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], tmp);
1050 tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1052 tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
/* XOR. */
1058 if (likely(rc != 31)) {
1061 tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1063 tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1066 tcg_gen_movi_i64(cpu_ir[rc], lit);
1068 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1074 gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
1078 gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
/* EQV: xor with complemented second operand. */
1082 if (likely(rc != 31)) {
1085 tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1087 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
1088 tcg_gen_not_i64(tmp, cpu_ir[rb]);
1089 tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], tmp);
1094 tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1096 tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
/* AMASK: report implemented architecture extensions. */
1102 if (likely(rc != 31)) {
1104 tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
1106 tcg_gen_helper_1_1(helper_amask, cpu_ir[rc], cpu_ir[rb]);
1111 gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
1115 gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
/* IMPLVER: implementation version constant. */
1120 tcg_gen_helper_1_0(helper_load_implver, cpu_ir[rc]);
/* Byte-manipulation group: MSK*/EXT*/INS* low/high forms. */
1130 gen_arith3(helper_mskbl, ra, rb, rc, islit, lit);
1134 gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
1138 gen_arith3(helper_insbl, ra, rb, rc, islit, lit);
1142 gen_arith3(helper_mskwl, ra, rb, rc, islit, lit);
1146 gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1150 gen_arith3(helper_inswl, ra, rb, rc, islit, lit);
1154 gen_arith3(helper_mskll, ra, rb, rc, islit, lit);
1158 gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
1162 gen_arith3(helper_insll, ra, rb, rc, islit, lit);
1166 gen_arith3(helper_zap, ra, rb, rc, islit, lit);
1170 gen_arith3(helper_zapnot, ra, rb, rc, islit, lit);
1174 gen_arith3(helper_mskql, ra, rb, rc, islit, lit);
/* SRL: logical shift right; only the low 6 bits of the count are used. */
1178 if (likely(rc != 31)) {
1181 tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1183 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1184 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1185 tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
1186 tcg_temp_free(shift);
1189 tcg_gen_movi_i64(cpu_ir[rc], 0);
/* EXTQL: NULL extension = full quadword. */
1194 gen_ext_l(NULL, ra, rb, rc, islit, lit);
/* SLL: shift left. */
1198 if (likely(rc != 31)) {
1201 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1203 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1204 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1205 tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
1206 tcg_temp_free(shift);
1209 tcg_gen_movi_i64(cpu_ir[rc], 0);
1214 gen_arith3(helper_insql, ra, rb, rc, islit, lit);
/* SRA: arithmetic shift right. */
1218 if (likely(rc != 31)) {
1221 tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1223 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1224 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1225 tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
1226 tcg_temp_free(shift);
1229 tcg_gen_movi_i64(cpu_ir[rc], 0);
1234 gen_arith3(helper_mskwh, ra, rb, rc, islit, lit);
1238 gen_arith3(helper_inswh, ra, rb, rc, islit, lit);
1242 gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1246 gen_arith3(helper_msklh, ra, rb, rc, islit, lit);
1250 gen_arith3(helper_inslh, ra, rb, rc, islit, lit);
/* NOTE(review): this appears to be EXTLH but passes ext16u like EXTWH above;
   a longword extract would be expected to use tcg_gen_ext32u_i64 — confirm. */
1254 gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1258 gen_arith3(helper_mskqh, ra, rb, rc, islit, lit);
1262 gen_arith3(helper_insqh, ra, rb, rc, islit, lit);
1266 gen_ext_h(NULL, ra, rb, rc, islit, lit);
/* MULL: 32-bit multiply, result sign-extended. */
1276 if (likely(rc != 31)) {
1278 tcg_gen_movi_i64(cpu_ir[rc], 0);
1281 tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1283 tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1284 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
/* MULQ: full 64-bit multiply. */
1290 if (likely(rc != 31)) {
1292 tcg_gen_movi_i64(cpu_ir[rc], 0);
1294 tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1296 tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1301 gen_arith3(helper_umulh, ra, rb, rc, islit, lit);
1305 gen_arith3(helper_mullv, ra, rb, rc, islit, lit);
1309 gen_arith3(helper_mulqv, ra, rb, rc, islit, lit);
/* Opcode 0x14: SQRT/ITOF group (requires the FIX extension). */
1316 switch (fpfn) { /* f11 & 0x3F */
/* ITOFS: move integer register bits into an S-float register. */
1319 if (!(ctx->amask & AMASK_FIX))
1321 if (likely(rc != 31)) {
1323 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
1324 tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1325 tcg_gen_helper_1_1(helper_memory_to_s, cpu_fir[rc], tmp);
1328 tcg_gen_movi_i64(cpu_fir[rc], 0);
1333 if (!(ctx->amask & AMASK_FIX))
1335 gen_farith2(&helper_sqrtf, rb, rc);
1339 if (!(ctx->amask & AMASK_FIX))
1341 gen_farith2(&helper_sqrts, rb, rc);
/* ITOFF: move integer register bits into an F-float register. */
1345 if (!(ctx->amask & AMASK_FIX))
1347 if (likely(rc != 31)) {
1349 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
1350 tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1351 tcg_gen_helper_1_1(helper_memory_to_f, cpu_fir[rc], tmp);
1354 tcg_gen_movi_i64(cpu_fir[rc], 0);
/* ITOFT: raw 64-bit move from integer to FP register. */
1359 if (!(ctx->amask & AMASK_FIX))
1361 if (likely(rc != 31)) {
1363 tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
1365 tcg_gen_movi_i64(cpu_fir[rc], 0);
1370 if (!(ctx->amask & AMASK_FIX))
1372 gen_farith2(&helper_sqrtg, rb, rc);
1376 if (!(ctx->amask & AMASK_FIX))
1378 gen_farith2(&helper_sqrtt, rb, rc);
1385 /* VAX floating point */
1386 /* XXX: rounding mode and trap are ignored (!) */
1387 switch (fpfn) { /* f11 & 0x3F */
1390 gen_farith3(&helper_addf, ra, rb, rc);
1394 gen_farith3(&helper_subf, ra, rb, rc);
1398 gen_farith3(&helper_mulf, ra, rb, rc);
1402 gen_farith3(&helper_divf, ra, rb, rc);
1407 gen_farith2(&helper_cvtdg, rb, rc);
1414 gen_farith3(&helper_addg, ra, rb, rc);
1418 gen_farith3(&helper_subg, ra, rb, rc);
1422 gen_farith3(&helper_mulg, ra, rb, rc);
1426 gen_farith3(&helper_divg, ra, rb, rc);
1430 gen_farith3(&helper_cmpgeq, ra, rb, rc);
1434 gen_farith3(&helper_cmpglt, ra, rb, rc);
1438 gen_farith3(&helper_cmpgle, ra, rb, rc);
1442 gen_farith2(&helper_cvtgf, rb, rc);
/* NOTE(review): gen_farith2 is declared as (helper, rb, rc) — this call
   passes an extra leading ctx argument; verify against the definition. */
1447 gen_farith2(ctx, &helper_cvtgd, rb, rc);
1454 gen_farith2(&helper_cvtgq, rb, rc);
1458 gen_farith2(&helper_cvtqf, rb, rc);
1462 gen_farith2(&helper_cvtqg, rb, rc);
1469 /* IEEE floating-point */
1470 /* XXX: rounding mode and traps are ignored (!) */
1471 switch (fpfn) { /* f11 & 0x3F */
1474 gen_farith3(&helper_adds, ra, rb, rc);
1478 gen_farith3(&helper_subs, ra, rb, rc);
1482 gen_farith3(&helper_muls, ra, rb, rc);
1486 gen_farith3(&helper_divs, ra, rb, rc);
1490 gen_farith3(&helper_addt, ra, rb, rc);
1494 gen_farith3(&helper_subt, ra, rb, rc);
1498 gen_farith3(&helper_mult, ra, rb, rc);
1502 gen_farith3(&helper_divt, ra, rb, rc);
1506 gen_farith3(&helper_cmptun, ra, rb, rc);
1510 gen_farith3(&helper_cmpteq, ra, rb, rc);
1514 gen_farith3(&helper_cmptlt, ra, rb, rc);
1518 gen_farith3(&helper_cmptle, ra, rb, rc);
1521 /* XXX: incorrect */
/* fn11 0x2AC selects CVTST; other encodings here are CVTTS. */
1522 if (fn11 == 0x2AC) {
1524 gen_farith2(&helper_cvtst, rb, rc);
1527 gen_farith2(&helper_cvtts, rb, rc);
1532 gen_farith2(&helper_cvttq, rb, rc);
1536 gen_farith2(&helper_cvtqs, rb, rc);
1540 gen_farith2(&helper_cvtqt, rb, rc);
/* Opcode 0x17: FP register moves, FPCR access, FP conditional moves. */
1550 gen_farith2(&helper_cvtlq, rb, rc);
/* CPYS with ra == rb is the canonical FP register move (FMOV). */
1553 if (likely(rc != 31)) {
1556 tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
1559 gen_farith3(&helper_cpys, ra, rb, rc);
1564 gen_farith3(&helper_cpysn, ra, rb, rc);
1568 gen_farith3(&helper_cpyse, ra, rb, rc);
/* MT_FPCR: write the FP control register from fir[ra]. */
1572 if (likely(ra != 31))
1573 tcg_gen_helper_0_1(helper_store_fpcr, cpu_fir[ra]);
1575 TCGv tmp = tcg_const_i64(0);
1576 tcg_gen_helper_0_1(helper_store_fpcr, tmp);
/* MF_FPCR: read the FP control register into fir[ra]. */
1582 if (likely(ra != 31))
1583 tcg_gen_helper_1_0(helper_load_fpcr, cpu_fir[ra]);
1587 gen_fcmov(&helper_cmpfeq, ra, rb, rc);
1591 gen_fcmov(&helper_cmpfne, ra, rb, rc);
1595 gen_fcmov(&helper_cmpflt, ra, rb, rc);
1599 gen_fcmov(&helper_cmpfge, ra, rb, rc);
1603 gen_fcmov(&helper_cmpfle, ra, rb, rc);
1607 gen_fcmov(&helper_cmpfgt, ra, rb, rc);
1611 gen_farith2(&helper_cvtql, rb, rc);
1615 gen_farith2(&helper_cvtqlv, rb, rc);
1619 gen_farith2(&helper_cvtqlsv, rb, rc);
/* Opcode 0x18: miscellaneous (TRAPB/MB/FETCH/RPCC/RC/RS...). */
1626 switch ((uint16_t)disp16) {
1629 /* No-op. Just exit from the current tb */
1634 /* No-op. Just exit from the current tb */
/* RPCC: read the processor cycle counter. */
1656 tcg_gen_helper_1_0(helper_load_pcc, cpu_ir[ra]);
/* RC: read and clear the intr flag. */
1661 tcg_gen_helper_1_0(helper_rc, cpu_ir[ra]);
1665 /* XXX: TODO: evict tb cache at address rb */
/* RS: read and set the intr flag. */
1675 tcg_gen_helper_1_0(helper_rs, cpu_ir[ra]);
1686 /* HW_MFPR (PALcode) */
1687 #if defined (CONFIG_USER_ONLY)
1692 gen_op_mfpr(insn & 0xFF);
1694 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
/* JMP/JSR/RET/JSR_COROUTINE: save return PC, jump to rb & ~3. */
1699 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1701 tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
1703 tcg_gen_movi_i64(cpu_pc, 0);
1704 /* Those four jumps only differ by the branch prediction hint */
1722 /* HW_LD (PALcode) */
1723 #if defined (CONFIG_USER_ONLY)
1729 tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
1731 tcg_gen_movi_i64(cpu_T[0], 0);
1732 tcg_gen_movi_i64(cpu_T[1], disp12);
1733 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
/* Bits 15:12 select the HW_LD access variant. */
1734 switch ((insn >> 12) & 0xF) {
1736 /* Longword physical access */
1740 /* Quadword physical access */
1744 /* Longword physical access with lock */
1748 /* Quadword physical access with lock */
1752 /* Longword virtual PTE fetch */
1753 gen_op_ldl_kernel();
1756 /* Quadword virtual PTE fetch */
1757 gen_op_ldq_kernel();
1766 /* Longword virtual access */
1767 gen_op_ld_phys_to_virt();
1771 /* Quadword virtual access */
1772 gen_op_ld_phys_to_virt();
1776 /* Longword virtual access with protection check */
1780 /* Quadword virtual access with protection check */
1784 /* Longword virtual access with alternate access mode */
1785 gen_op_set_alt_mode();
1786 gen_op_ld_phys_to_virt();
1788 gen_op_restore_mode();
1791 /* Quadword virtual access with alternate access mode */
1792 gen_op_set_alt_mode();
1793 gen_op_ld_phys_to_virt();
1795 gen_op_restore_mode();
1798 /* Longword virtual access with alternate access mode and
1801 gen_op_set_alt_mode();
1803 gen_op_restore_mode();
1806 /* Quadword virtual access with alternate access mode and
1809 gen_op_set_alt_mode();
1811 gen_op_restore_mode();
1815 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
/* SEXTB: sign-extend byte (BWX extension required). */
1822 if (!(ctx->amask & AMASK_BWX))
1824 if (likely(rc != 31)) {
1826 tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
1828 tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
/* SEXTW: sign-extend word. */
1833 if (!(ctx->amask & AMASK_BWX))
1835 if (likely(rc != 31)) {
1837 tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
1839 tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
/* CTPOP/CTLZ/CTTZ: bit-count instructions (CIX extension); the literal
   form is folded at translation time with the host ctpop64/clz64/ctz64. */
1844 if (!(ctx->amask & AMASK_CIX))
1846 if (likely(rc != 31)) {
1848 tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
1850 tcg_gen_helper_1_1(helper_ctpop, cpu_ir[rc], cpu_ir[rb]);
1855 if (!(ctx->amask & AMASK_MVI))
1862 if (!(ctx->amask & AMASK_CIX))
1864 if (likely(rc != 31)) {
1866 tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
1868 tcg_gen_helper_1_1(helper_ctlz, cpu_ir[rc], cpu_ir[rb]);
1873 if (!(ctx->amask & AMASK_CIX))
1875 if (likely(rc != 31)) {
1877 tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
1879 tcg_gen_helper_1_1(helper_cttz, cpu_ir[rc], cpu_ir[rb]);
/* MVI multimedia instructions: gated on AMASK_MVI, bodies elided. */
1884 if (!(ctx->amask & AMASK_MVI))
1891 if (!(ctx->amask & AMASK_MVI))
1898 if (!(ctx->amask & AMASK_MVI))
1905 if (!(ctx->amask & AMASK_MVI))
1912 if (!(ctx->amask & AMASK_MVI))
1919 if (!(ctx->amask & AMASK_MVI))
1926 if (!(ctx->amask & AMASK_MVI))
1933 if (!(ctx->amask & AMASK_MVI))
1940 if (!(ctx->amask & AMASK_MVI))
1947 if (!(ctx->amask & AMASK_MVI))
1954 if (!(ctx->amask & AMASK_MVI))
1961 if (!(ctx->amask & AMASK_MVI))
/* FTOIT: raw 64-bit move from FP to integer register (FIX extension). */
1968 if (!(ctx->amask & AMASK_FIX))
1970 if (likely(rc != 31)) {
1972 tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
1974 tcg_gen_movi_i64(cpu_ir[rc], 0);
/* FTOIS: convert S-float register bits to the integer register format. */
1979 if (!(ctx->amask & AMASK_FIX))
1982 TCGv tmp1 = tcg_temp_new(TCG_TYPE_I32);
1984 tcg_gen_helper_1_1(helper_s_to_memory, tmp1, cpu_fir[ra]);
1986 TCGv tmp2 = tcg_const_i64(0);
1987 tcg_gen_helper_1_1(helper_s_to_memory, tmp1, tmp2);
1988 tcg_temp_free(tmp2);
1990 tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
1991 tcg_temp_free(tmp1);
1999 /* HW_MTPR (PALcode) */
2000 #if defined (CONFIG_USER_ONLY)
2006 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
2008 tcg_gen_movi_i64(cpu_T[0], 0);
2009 gen_op_mtpr(insn & 0xFF);
2014 /* HW_REI (PALcode) */
2015 #if defined (CONFIG_USER_ONLY)
2025 tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
2027 tcg_gen_movi_i64(cpu_T[0], 0);
/* Sign-extend the 13-bit displacement embedded in the instruction. */
2028 tcg_gen_movi_i64(cpu_T[1], (((int64_t)insn << 51) >> 51));
2029 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2036 /* HW_ST (PALcode) */
2037 #if defined (CONFIG_USER_ONLY)
2043 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp12);
2045 tcg_gen_movi_i64(cpu_T[0], disp12);
2047 tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
2049 tcg_gen_movi_i64(cpu_T[1], 0);
/* Bits 15:12 select the HW_ST access variant. */
2050 switch ((insn >> 12) & 0xF) {
2052 /* Longword physical access */
2056 /* Quadword physical access */
2060 /* Longword physical access with lock */
2064 /* Quadword physical access with lock */
2068 /* Longword virtual access */
2069 gen_op_st_phys_to_virt();
2073 /* Quadword virtual access */
2074 gen_op_st_phys_to_virt();
2096 /* Longword virtual access with alternate access mode */
2097 gen_op_set_alt_mode();
2098 gen_op_st_phys_to_virt();
2100 gen_op_restore_mode();
2103 /* Quadword virtual access with alternate access mode */
2104 gen_op_set_alt_mode();
2105 gen_op_st_phys_to_virt();
2107 gen_op_restore_mode();
/* FP loads/stores: LDF/LDG/LDS/LDT and STF/STG/STS/STT. */
2121 gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
2125 gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
2129 gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
2133 gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
2137 gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0);
2141 gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0);
2145 gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0);
2149 gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0);
/* Integer loads/stores, including locked LDL_L/LDQ_L and STL_C/STQ_C. */
2153 gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
2157 gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
2161 gen_load_mem_dyngen(ctx, &gen_ldl_l, ra, rb, disp16, 0);
2165 gen_load_mem_dyngen(ctx, &gen_ldq_l, ra, rb, disp16, 0);
2169 gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0);
2173 gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0);
2177 gen_store_mem_dyngen(ctx, &gen_stl_c, ra, rb, disp16, 0);
2181 gen_store_mem_dyngen(ctx, &gen_stq_c, ra, rb, disp16, 0);
/* BR: unconditional branch, saving the return address in ra. */
2186 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2187 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2192 gen_fbcond(ctx, &helper_cmpfeq, ra, disp16);
2197 gen_fbcond(ctx, &helper_cmpflt, ra, disp16);
2202 gen_fbcond(ctx, &helper_cmpfle, ra, disp16);
/* BSR: branch to subroutine, saving the return address in ra. */
2208 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2209 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2214 gen_fbcond(ctx, &helper_cmpfne, ra, disp16);
2219 gen_fbcond(ctx, &helper_cmpfge, ra, disp16);
2224 gen_fbcond(ctx, &helper_cmpfgt, ra, disp16);
/* Integer conditional branches; mask=1 selects the low-bit BLBC/BLBS. */
2229 gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 1);
2234 gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 0);
2239 gen_bcond(ctx, TCG_COND_LT, ra, disp16, 0);
2244 gen_bcond(ctx, TCG_COND_LE, ra, disp16, 0);
2249 gen_bcond(ctx, TCG_COND_NE, ra, disp16, 1);
2254 gen_bcond(ctx, TCG_COND_NE, ra, disp16, 0);
2259 gen_bcond(ctx, TCG_COND_GE, ra, disp16, 0);
2264 gen_bcond(ctx, TCG_COND_GT, ra, disp16, 0);
2276 static always_inline void gen_intermediate_code_internal (CPUState *env,
2277 TranslationBlock *tb,
/* Core translation loop.  From the visible code: set up a DisasContext
 * from env (amask, mem_idx from PS<4:3>, pal_mode from EXC_ADDR bit 0),
 * then repeatedly fetch an insn with ldl_code() and hand it to
 * translate_one() until it reports a stop, a breakpoint fires, a page
 * boundary is crossed, single-stepping is on, or max_insns is reached;
 * finally record tb->size/tb->icount and optionally log disassembly.
 * NOTE(review): sampled listing — declarations of ret/j/lj/num_insns/
 * insn and several control-flow lines are not visible here. */
2280 #if defined ALPHA_DEBUG_DISAS
2281 static int insn_count;
2283 DisasContext ctx, *ctxp = &ctx;
2284 target_ulong pc_start;
2286 uint16_t *gen_opc_end;
2293 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
2295 ctx.amask = env->amask;
2296 #if defined (CONFIG_USER_ONLY)
2299 ctx.mem_idx = ((env->ps >> 3) & 3);
2300 ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
/* Instruction budget from the TB flags; fall back to the maximum. */
2303 max_insns = tb->cflags & CF_COUNT_MASK;
2305 max_insns = CF_COUNT_MASK;
2308 for (ret = 0; ret == 0;) {
/* Emit a debug exception when the current PC hits a breakpoint. */
2309 if (env->nb_breakpoints > 0) {
2310 for(j = 0; j < env->nb_breakpoints; j++) {
2311 if (env->breakpoints[j] == ctx.pc) {
2312 gen_excp(&ctx, EXCP_DEBUG, 0);
/* Record the opcode-buffer index -> guest PC mapping so that
 * gen_pc_load() can later recover the PC mid-TB. */
2318 j = gen_opc_ptr - gen_opc_buf;
2322 gen_opc_instr_start[lj++] = 0;
2323 gen_opc_pc[lj] = ctx.pc;
2324 gen_opc_instr_start[lj] = 1;
2325 gen_opc_icount[lj] = num_insns;
2328 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
2330 #if defined ALPHA_DEBUG_DISAS
2332 if (logfile != NULL) {
2333 fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
2334 ctx.pc, ctx.mem_idx);
/* Fetch and translate the next 32-bit instruction word. */
2337 insn = ldl_code(ctx.pc);
2338 #if defined ALPHA_DEBUG_DISAS
2340 if (logfile != NULL) {
2341 fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
2346 ret = translate_one(ctxp, insn);
2349 /* if we reach a page boundary or are single stepping, stop
2352 if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
2353 (env->singlestep_enabled) ||
2354 num_insns >= max_insns) {
2357 #if defined (DO_SINGLE_STEP)
/* Return codes 1 and 3 presumably mean translate_one() already
 * updated cpu_pc — TODO(review): confirm against translate_one(). */
2361 if (ret != 1 && ret != 3) {
2362 tcg_gen_movi_i64(cpu_pc, ctx.pc);
2364 #if defined (DO_TB_FLUSH)
2365 tcg_gen_helper_0_0(helper_tb_flush);
2367 if (tb->cflags & CF_LAST_IO)
2369 /* Generate the return instruction */
2371 gen_icount_end(tb, num_insns);
2372 *gen_opc_ptr = INDEX_op_end;
/* Pad the remainder of the instr-start table for the PC-search case. */
2374 j = gen_opc_ptr - gen_opc_buf;
2377 gen_opc_instr_start[lj++] = 0;
2379 tb->size = ctx.pc - pc_start;
2380 tb->icount = num_insns;
2382 #if defined ALPHA_DEBUG_DISAS
2383 if (loglevel & CPU_LOG_TB_CPU) {
2384 cpu_dump_state(env, logfile, fprintf, 0);
2386 if (loglevel & CPU_LOG_TB_IN_ASM) {
2387 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
2388 target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
2389 fprintf(logfile, "\n");
2394 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
/* Public entry point: translate tb in the normal mode (third argument
 * 0 — presumably "no PC search"; cf. gen_intermediate_code_pc). */
2396 gen_intermediate_code_internal(env, tb, 0);
2399 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
/* Variant used when restoring guest state after a fault: third argument
 * 1 presumably enables building the opcode-index -> PC search tables. */
2401 gen_intermediate_code_internal(env, tb, 1);
2404 CPUAlphaState * cpu_alpha_init (const char *cpu_model)
/* Allocate and initialize a fresh Alpha CPU state: zeroed struct from
 * qemu_mallocz(), translator globals via alpha_translate_init(), a
 * hardcoded implementation version, and (system mode) the internal
 * processor registers.  NOTE(review): the NULL check after allocation
 * and the final return are not visible in this excerpt — confirm. */
2409 env = qemu_mallocz(sizeof(CPUAlphaState));
2413 alpha_translate_init();
2415 /* XXX: should not be hardcoded */
2416 env->implver = IMPLVER_2106x;
2418 #if defined (CONFIG_USER_ONLY)
2422 /* Initialize IPR */
2423 hwpcb = env->ipr[IPR_PCBB];
2424 env->ipr[IPR_ASN] = 0;
2425 env->ipr[IPR_ASTEN] = 0;
2426 env->ipr[IPR_ASTSR] = 0;
2427 env->ipr[IPR_DATFX] = 0;
/* Stack pointers / page-table base would come from the HWPCB, but the
 * loads are disabled (left commented out in the original). */
2429 // env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
2430 // env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
2431 // env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
2432 // env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
2433 env->ipr[IPR_FEN] = 0;
/* Start at the highest interrupt priority level. */
2434 env->ipr[IPR_IPL] = 31;
2435 env->ipr[IPR_MCES] = 0;
2436 env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
2437 // env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
2438 env->ipr[IPR_SISR] = 0;
2439 env->ipr[IPR_VIRBND] = -1ULL;
2444 void gen_pc_load(CPUState *env, TranslationBlock *tb,
2445 unsigned long searched_pc, int pc_pos, void *puc)
/* Restore the guest PC from the per-opcode PC table built during
 * translation (gen_opc_pc[] filled in gen_intermediate_code_internal);
 * used when unwinding mid-TB, e.g. after a fault.  The searched_pc,
 * tb and puc parameters are unused for Alpha. */
2447 env->pc = gen_opc_pc[pc_pos];