2 * Alpha emulation cpu translation for qemu.
4 * Copyright (c) 2007 Jocelyn Mayer
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
28 #include "host-utils.h"
31 #include "qemu-common.h"
33 /* #define DO_SINGLE_STEP */
34 #define ALPHA_DEBUG_DISAS
35 /* #define DO_TB_FLUSH */
37 typedef struct DisasContext DisasContext;
41 #if !defined (CONFIG_USER_ONLY)
47 /* global register indexes */
/* TCG globals mapping the 31 Alpha integer and 31 FP registers; register 31
   (always-zero) has no TCG global and is special-cased at translation time. */
49 static TCGv cpu_ir[31];
50 static TCGv cpu_fir[31];
/* Backing store for the register name strings: "ir0".."ir30" take 10*4+21*5
   bytes (incl. NULs), "fir0".."fir30" take 10*5+21*6 — sizes must match the
   pointer advances in alpha_translate_init(). */
55 static char cpu_reg_names[10*4+21*5 + 10*5+21*6];
57 #include "gen-icount.h"
/* One-time TCG initialization: create globals for env, the integer and FP
   register files, pc and the load-locked address, and register helpers. */
59 static void alpha_translate_init(void)
/* Guard so repeated calls are no-ops — TCG globals must be created once. */
63 static int done_init = 0;
68 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
71 for (i = 0; i < 31; i++) {
72 sprintf(p, "ir%d", i);
73 cpu_ir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
74 offsetof(CPUState, ir[i]), p);
/* Advance past "irN\0" — 4 bytes for single-digit N, 5 for two digits. */
75 p += (i < 10) ? 4 : 5;
77 sprintf(p, "fir%d", i);
78 cpu_fir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
79 offsetof(CPUState, fir[i]), p);
/* Advance past "firN\0" — 5 or 6 bytes; totals match cpu_reg_names size. */
80 p += (i < 10) ? 5 : 6;
83 cpu_pc = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
84 offsetof(CPUState, pc), "pc");
/* cpu_lock holds the locked address for LDx_L/STx_C emulation (-1 = none). */
86 cpu_lock = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
87 offsetof(CPUState, lock), "lock");
89 /* register helpers */
91 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
/* Raise exception 'exception' with 'error_code': sync cpu_pc to the current
   guest pc, then call the helper that longjmps out of the translated code. */
97 static always_inline void gen_excp (DisasContext *ctx,
98 int exception, int error_code)
102 tcg_gen_movi_i64(cpu_pc, ctx->pc);
103 tmp1 = tcg_const_i32(exception);
104 tmp2 = tcg_const_i32(error_code);
105 tcg_gen_helper_0_2(helper_excp, tmp1, tmp2);
/* Raise the illegal-opcode exception (OPCDEC) for the current instruction. */
110 static always_inline void gen_invalid (DisasContext *ctx)
112 gen_excp(ctx, EXCP_OPCDEC, 0);
/* LDF: load a 32-bit VAX F-float and expand it to register format. */
115 static always_inline void gen_qemu_ldf (TCGv t0, TCGv t1, int flags)
117 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
118 tcg_gen_qemu_ld32u(tmp, t1, flags);
119 tcg_gen_helper_1_1(helper_memory_to_f, t0, tmp);
/* LDG: load a 64-bit VAX G-float and convert memory to register format. */
123 static always_inline void gen_qemu_ldg (TCGv t0, TCGv t1, int flags)
125 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
126 tcg_gen_qemu_ld64(tmp, t1, flags);
127 tcg_gen_helper_1_1(helper_memory_to_g, t0, tmp);
/* LDS: load a 32-bit IEEE single and expand it to register format. */
131 static always_inline void gen_qemu_lds (TCGv t0, TCGv t1, int flags)
133 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
134 tcg_gen_qemu_ld32u(tmp, t1, flags);
135 tcg_gen_helper_1_1(helper_memory_to_s, t0, tmp);
/* LDL_L: load-locked longword — remember the locked address in cpu_lock,
   then do a sign-extending 32-bit load. */
139 static always_inline void gen_qemu_ldl_l (TCGv t0, TCGv t1, int flags)
141 tcg_gen_mov_i64(cpu_lock, t1);
142 tcg_gen_qemu_ld32s(t0, t1, flags);
/* LDQ_L: load-locked quadword — record the locked address, then load 64 bits. */
145 static always_inline void gen_qemu_ldq_l (TCGv t0, TCGv t1, int flags)
147 tcg_gen_mov_i64(cpu_lock, t1);
148 tcg_gen_qemu_ld64(t0, t1, flags);
/* Common code for memory loads: compute rb+disp16 (or just disp16 when rb is
   $31), optionally clear the low 3 bits (LDQ_U-style alignment), and load
   into an FP or integer register.  A load into $31/f31 is a no-op (prefetch)
   and is skipped up front. */
151 static always_inline void gen_load_mem (DisasContext *ctx,
152 void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1, int flags),
153 int ra, int rb, int32_t disp16,
/* ra == 31: destination is the zero register — nothing to do. */
158 if (unlikely(ra == 31))
161 addr = tcg_temp_new(TCG_TYPE_I64);
163 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
/* 'clear' path: force 8-byte alignment for unaligned-access insns. */
165 tcg_gen_andi_i64(addr, addr, ~0x7);
169 tcg_gen_movi_i64(addr, disp16);
172 tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
174 tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
/* STF: convert register-format F-float to memory format and store 32 bits. */
178 static always_inline void gen_qemu_stf (TCGv t0, TCGv t1, int flags)
180 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
181 tcg_gen_helper_1_1(helper_f_to_memory, tmp, t0);
182 tcg_gen_qemu_st32(tmp, t1, flags);
/* STG: convert register-format G-float to memory format and store 64 bits. */
186 static always_inline void gen_qemu_stg (TCGv t0, TCGv t1, int flags)
188 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
189 tcg_gen_helper_1_1(helper_g_to_memory, tmp, t0);
190 tcg_gen_qemu_st64(tmp, t1, flags);
/* STS: convert register-format IEEE single to memory format and store 32 bits. */
194 static always_inline void gen_qemu_sts (TCGv t0, TCGv t1, int flags)
196 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
197 tcg_gen_helper_1_1(helper_s_to_memory, tmp, t0);
198 tcg_gen_qemu_st32(tmp, t1, flags);
/* STL_C: store-conditional longword.  Succeeds (stores and writes 1 to t0)
   only when the address still matches cpu_lock; otherwise writes 0.  The
   lock is always invalidated (-1) afterwards. */
202 static always_inline void gen_qemu_stl_c (TCGv t0, TCGv t1, int flags)
206 l1 = gen_new_label();
207 l2 = gen_new_label();
208 tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
209 tcg_gen_qemu_st32(t0, t1, flags);
210 tcg_gen_movi_i64(t0, 1);
/* Failure path: report 0 in t0. */
213 tcg_gen_movi_i64(t0, 0);
215 tcg_gen_movi_i64(cpu_lock, -1);
/* STQ_C: store-conditional quadword — same protocol as gen_qemu_stl_c but
   with a 64-bit store. */
218 static always_inline void gen_qemu_stq_c (TCGv t0, TCGv t1, int flags)
222 l1 = gen_new_label();
223 l2 = gen_new_label();
224 tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
225 tcg_gen_qemu_st64(t0, t1, flags);
226 tcg_gen_movi_i64(t0, 1);
/* Failure path: report 0 in t0. */
229 tcg_gen_movi_i64(t0, 0);
231 tcg_gen_movi_i64(cpu_lock, -1);
/* Common code for memory stores: compute the effective address like
   gen_load_mem, then store from an FP or integer register; when ra is $31
   a constant zero is stored instead.  'local' selects local temps because
   the store-conditional callbacks emit branches (plain temps don't survive
   across TCG branch ops). */
234 static always_inline void gen_store_mem (DisasContext *ctx,
235 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1, int flags),
236 int ra, int rb, int32_t disp16,
237 int fp, int clear, int local)
241 addr = tcg_temp_local_new(TCG_TYPE_I64);
243 addr = tcg_temp_new(TCG_TYPE_I64);
245 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
/* 'clear' path: force 8-byte alignment (STQ_U). */
247 tcg_gen_andi_i64(addr, addr, ~0x7);
251 tcg_gen_movi_i64(addr, disp16);
255 tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
257 tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
/* ra == 31: store the zero register's value, i.e. constant 0. */
261 zero = tcg_const_local_i64(0);
263 zero = tcg_const_i64(0);
264 tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
/* Conditional branch on an integer register.  'mask' selects the BLBC/BLBS
   form that tests only bit 0.  Taken path sets pc to pc + 4*disp16 (disp16
   here actually holds the 21-bit branch displacement); fall-through keeps
   the sequential pc. */
270 static always_inline void gen_bcond (DisasContext *ctx,
272 int ra, int32_t disp16, int mask)
276 l1 = gen_new_label();
277 l2 = gen_new_label();
278 if (likely(ra != 31)) {
/* mask form: compare only the low bit of ra. */
280 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
281 tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
282 tcg_gen_brcondi_i64(cond, tmp, 0, l1);
285 tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
287 /* Very uncommon case - Do not bother to optimize. */
288 TCGv tmp = tcg_const_i64(0);
289 tcg_gen_brcondi_i64(cond, tmp, 0, l1);
/* Not taken: continue at the next sequential instruction. */
292 tcg_gen_movi_i64(cpu_pc, ctx->pc);
295 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
/* Conditional branch on a floating-point register: call the FP-compare
   helper 'func' against fa (or 0.0 when ra is f31) and branch when the
   helper returns non-zero. */
299 static always_inline void gen_fbcond (DisasContext *ctx,
301 int ra, int32_t disp16)
306 l1 = gen_new_label();
307 l2 = gen_new_label();
309 tmp = tcg_temp_new(TCG_TYPE_I64);
310 tcg_gen_helper_1_1(func, tmp, cpu_fir[ra]);
/* ra == 31: evaluate the condition against the constant zero register. */
312 tmp = tcg_const_i64(0);
313 tcg_gen_helper_1_1(func, tmp, tmp);
315 tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
316 tcg_gen_movi_i64(cpu_pc, ctx->pc);
319 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
/* CMOVxx: conditionally move rb (or literal) into rc.  'inv_cond' is the
   inverse of the architectural condition — it branches AROUND the move when
   the move must not happen.  'mask' selects the low-bit-only test
   (CMOVLBC/CMOVLBS). */
323 static always_inline void gen_cmov (TCGCond inv_cond,
324 int ra, int rb, int rc,
325 int islit, uint8_t lit, int mask)
/* rc == 31: writing the zero register — nothing to do. */
329 if (unlikely(rc == 31))
332 l1 = gen_new_label();
336 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
337 tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
338 tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
341 tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
343 /* Very uncommon case - Do not bother to optimize. */
344 TCGv tmp = tcg_const_i64(0);
345 tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
350 tcg_gen_movi_i64(cpu_ir[rc], lit);
352 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
/* Emit a 1-operand FP helper call: frc = helper(frb), substituting the
   constant 0 when rb is f31; a write to f31 is discarded up front. */
356 static always_inline void gen_farith2 (void *helper,
359 if (unlikely(rc == 31))
363 tcg_gen_helper_1_1(helper, cpu_fir[rc], cpu_fir[rb]);
365 TCGv tmp = tcg_const_i64(0);
366 tcg_gen_helper_1_1(helper, cpu_fir[rc], tmp);
/* Emit a 2-operand FP helper call: frc = helper(fra, frb), with the four
   combinations of ra/rb being f31 (constant zero) handled explicitly. */
371 static always_inline void gen_farith3 (void *helper,
372 int ra, int rb, int rc)
374 if (unlikely(rc == 31))
379 tcg_gen_helper_1_2(helper, cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);
381 TCGv tmp = tcg_const_i64(0);
382 tcg_gen_helper_1_2(helper, cpu_fir[rc], cpu_fir[ra], tmp);
/* ra == 31: first operand is constant zero. */
386 TCGv tmp = tcg_const_i64(0);
388 tcg_gen_helper_1_2(helper, cpu_fir[rc], tmp, cpu_fir[rb]);
390 tcg_gen_helper_1_2(helper, cpu_fir[rc], tmp, tmp);
/* FCMOVxx: evaluate FP condition 'func' on fra (or 0.0 for f31); when the
   result is non-zero, move frb into frc (0 when rb is f31).  The branch
   skips the move on a zero/false condition. */
395 static always_inline void gen_fcmov (void *func,
396 int ra, int rb, int rc)
401 if (unlikely(rc == 31))
404 l1 = gen_new_label();
405 tmp = tcg_temp_new(TCG_TYPE_I64);
407 tmp = tcg_temp_new(TCG_TYPE_I64);
408 tcg_gen_helper_1_1(func, tmp, cpu_fir[ra]);
410 tmp = tcg_const_i64(0);
411 tcg_gen_helper_1_1(func, tmp, tmp);
413 tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);
/* NOTE(review): the taken move copies fra, not frb — confirm against the
   Alpha FCMOV definition (frc <- frb when the condition on fra holds). */
415 tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra])
417 tcg_gen_movi_i64(cpu_fir[rc], 0);
421 /* EXTWH, EXTLH, EXTQH */
/* Extract-high byte-manipulation insns: shift ra left by (64 - 8*(pos&7))
   then zero/size-extend via 'tcg_gen_ext_i64' (NULL for the quadword form). */
422 static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
423 int ra, int rb, int rc,
424 int islit, uint8_t lit)
426 if (unlikely(rc == 31))
432 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
/* Literal position 0 degenerates to a plain move (shift by 64 is invalid). */
434 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
/* Register-position form: shift count = 64 - 8*(rb & 7). */
437 tmp1 = tcg_temp_new(TCG_TYPE_I64);
438 tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
439 tcg_gen_shli_i64(tmp1, tmp1, 3);
440 tmp2 = tcg_const_i64(64);
441 tcg_gen_sub_i64(tmp1, tmp2, tmp1);
443 tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
447 tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
/* ra == 31: result is zero. */
449 tcg_gen_movi_i64(cpu_ir[rc], 0);
452 /* EXTBL, EXTWL, EXTLL, EXTQL */
/* Extract-low byte-manipulation insns: shift ra right by 8*(pos&7) then
   zero-extend to the element size via 'tcg_gen_ext_i64' (NULL for EXTQL). */
453 static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
454 int ra, int rb, int rc,
455 int islit, uint8_t lit)
457 if (unlikely(rc == 31))
462 tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
464 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
465 tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
466 tcg_gen_shli_i64(tmp, tmp, 3);
467 tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
471 tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
/* ra == 31: result is zero. */
473 tcg_gen_movi_i64(cpu_ir[rc], 0);
476 /* Code to call arith3 helpers */
/* Emit rc = helper(ra-or-0, rb-or-literal) for 3-operand integer insns that
   are implemented in C helpers; the zero register is materialised as a
   constant where needed. */
477 static always_inline void gen_arith3 (void *helper,
478 int ra, int rb, int rc,
479 int islit, uint8_t lit)
481 if (unlikely(rc == 31))
486 TCGv tmp = tcg_const_i64(lit);
487 tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], tmp);
490 tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
/* ra == 31: first operand is constant zero. */
492 TCGv tmp1 = tcg_const_i64(0);
494 TCGv tmp2 = tcg_const_i64(lit);
495 tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, tmp2);
498 tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, cpu_ir[rb]);
/* CMPxx: set rc to 1 when 'ra cond (rb|lit)' holds, else 0, using a
   branch over the two constant stores. */
503 static always_inline void gen_cmp(TCGCond cond,
504 int ra, int rb, int rc,
505 int islit, uint8_t lit)
510 if (unlikely(rc == 31))
513 l1 = gen_new_label();
514 l2 = gen_new_label();
/* Copy ra (or 0 for $31) so the comparison source is stable. */
517 tmp = tcg_temp_new(TCG_TYPE_I64);
518 tcg_gen_mov_i64(tmp, cpu_ir[ra]);
520 tmp = tcg_const_i64(0);
522 tcg_gen_brcondi_i64(cond, tmp, lit, l1);
524 tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);
/* Condition false: rc = 0; true path (l1) stores 1. */
526 tcg_gen_movi_i64(cpu_ir[rc], 0);
529 tcg_gen_movi_i64(cpu_ir[rc], 1);
/* Translate a single 32-bit Alpha instruction at ctx->pc into TCG ops.
   Returns a value telling the caller whether to end the TB (exact return
   codes not visible in this extract). */
533 static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
536 int32_t disp21, disp16, disp12;
538 uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
542 /* Decode all instruction fields */
544 ra = (insn >> 21) & 0x1F;
545 rb = (insn >> 16) & 0x1F;
547 sbz = (insn >> 13) & 0x07;
548 islit = (insn >> 12) & 1;
/* rb == $31 without the literal bit behaves as operand value 0. */
549 if (rb == 31 && !islit) {
553 lit = (insn >> 13) & 0xFF;
554 palcode = insn & 0x03FFFFFF;
/* Sign-extend the 21-bit branch and 12-bit HW displacement fields. */
555 disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
556 disp16 = (int16_t)(insn & 0x0000FFFF);
557 disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
558 fn16 = insn & 0x0000FFFF;
559 fn11 = (insn >> 5) & 0x000007FF;
561 fn7 = (insn >> 5) & 0x0000007F;
562 fn2 = (insn >> 5) & 0x00000003;
564 #if defined ALPHA_DEBUG_DISAS
565 if (logfile != NULL) {
566 fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
567 opc, ra, rb, rc, disp16);
/* CALL_PAL dispatch: 0x80-0xBF are unprivileged calls, < 0x40 privileged
   (kernel mode only, checked via the low bit of mem_idx); anything else
   is an invalid PAL call. */
573 if (palcode >= 0x80 && palcode < 0xC0) {
574 /* Unprivileged PAL call */
575 gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
576 #if !defined (CONFIG_USER_ONLY)
577 } else if (palcode < 0x40) {
578 /* Privileged PAL code */
579 if (ctx->mem_idx & 1)
582 gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
585 /* Invalid PAL call */
/* LDA / LDAH: address computation into ra (no memory access). */
613 if (likely(ra != 31)) {
615 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
617 tcg_gen_movi_i64(cpu_ir[ra], disp16);
/* LDAH: displacement is shifted into the upper 16 bits. */
622 if (likely(ra != 31)) {
624 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
626 tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
/* LDBU / LDQ_U / LDWU (BWX extension gated) and STW/STB/STQ_U. */
631 if (!(ctx->amask & AMASK_BWX))
633 gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
637 gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
641 if (!(ctx->amask & AMASK_BWX))
643 gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 1);
647 gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0, 0);
651 gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0, 0);
655 gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1, 0);
/* Opcode 0x10 — integer arithmetic.  Each insn follows the same pattern:
   skip when rc is $31; use an immediate form when islit; substitute 0 when
   ra is $31.  32-bit forms sign-extend the result (ADDL etc.). */
/* ADDL */
661 if (likely(rc != 31)) {
664 tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
665 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
667 tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
668 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
672 tcg_gen_movi_i64(cpu_ir[rc], lit);
674 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
/* S4ADDL: (ra << 2) + rb, 32-bit result. */
680 if (likely(rc != 31)) {
682 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
683 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
685 tcg_gen_addi_i64(tmp, tmp, lit);
687 tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
688 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
692 tcg_gen_movi_i64(cpu_ir[rc], lit);
694 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
/* SUBL */
700 if (likely(rc != 31)) {
703 tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
705 tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
706 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
709 tcg_gen_movi_i64(cpu_ir[rc], -lit);
711 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
712 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
/* S4SUBL: (ra << 2) - rb, 32-bit result. */
718 if (likely(rc != 31)) {
720 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
721 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
723 tcg_gen_subi_i64(tmp, tmp, lit);
725 tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
726 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
730 tcg_gen_movi_i64(cpu_ir[rc], -lit);
732 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
733 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
/* CMPBGE — byte-wise unsigned compare, done in a helper. */
740 gen_arith3(helper_cmpbge, ra, rb, rc, islit, lit);
/* S8ADDL: (ra << 3) + rb, 32-bit result. */
744 if (likely(rc != 31)) {
746 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
747 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
749 tcg_gen_addi_i64(tmp, tmp, lit);
751 tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
752 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
756 tcg_gen_movi_i64(cpu_ir[rc], lit);
758 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
/* S8SUBL */
764 if (likely(rc != 31)) {
766 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
767 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
769 tcg_gen_subi_i64(tmp, tmp, lit);
771 tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
772 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
776 tcg_gen_movi_i64(cpu_ir[rc], -lit);
778 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
779 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
/* CMPULT */
786 gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
/* ADDQ — full 64-bit add, no extension. */
790 if (likely(rc != 31)) {
793 tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
795 tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
798 tcg_gen_movi_i64(cpu_ir[rc], lit);
800 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
/* S4ADDQ */
806 if (likely(rc != 31)) {
808 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
809 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
811 tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
813 tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
817 tcg_gen_movi_i64(cpu_ir[rc], lit);
819 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
/* SUBQ */
825 if (likely(rc != 31)) {
828 tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
830 tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
833 tcg_gen_movi_i64(cpu_ir[rc], -lit);
835 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
/* S4SUBQ */
841 if (likely(rc != 31)) {
843 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
844 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
846 tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
848 tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
852 tcg_gen_movi_i64(cpu_ir[rc], -lit);
854 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
/* CMPEQ */
860 gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
/* S8ADDQ */
864 if (likely(rc != 31)) {
866 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
867 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
869 tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
871 tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
875 tcg_gen_movi_i64(cpu_ir[rc], lit);
877 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
/* S8SUBQ */
883 if (likely(rc != 31)) {
885 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
886 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
888 tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
890 tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
894 tcg_gen_movi_i64(cpu_ir[rc], -lit);
896 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
/* CMPULE / overflow-trapping forms / signed compares. */
902 gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
906 gen_arith3(helper_addlv, ra, rb, rc, islit, lit);
910 gen_arith3(helper_sublv, ra, rb, rc, islit, lit);
914 gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
918 gen_arith3(helper_addqv, ra, rb, rc, islit, lit);
922 gen_arith3(helper_subqv, ra, rb, rc, islit, lit);
926 gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
/* Opcode 0x11 — logical and conditional-move instructions. */
/* AND */
936 if (likely(rc != 31)) {
938 tcg_gen_movi_i64(cpu_ir[rc], 0);
940 tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
942 tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
/* BIC (and-not) */
947 if (likely(rc != 31)) {
950 tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
952 tcg_gen_andc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
954 tcg_gen_movi_i64(cpu_ir[rc], 0);
/* CMOVLBS / CMOVLBC — low-bit conditional moves (mask form). */
959 gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
963 gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
/* BIS (or) */
967 if (likely(rc != 31)) {
970 tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
972 tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
975 tcg_gen_movi_i64(cpu_ir[rc], lit);
977 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
/* CMOVEQ / CMOVNE — note the inverted condition passed to gen_cmov. */
983 gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
987 gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
/* ORNOT */
991 if (likely(rc != 31)) {
994 tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
996 tcg_gen_orc_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
999 tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1001 tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
/* XOR */
1007 if (likely(rc != 31)) {
1010 tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1012 tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1015 tcg_gen_movi_i64(cpu_ir[rc], lit);
1017 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
/* CMOVLT / CMOVGE */
1023 gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
1027 gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
/* EQV (xor-not) */
1031 if (likely(rc != 31)) {
1034 tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1036 tcg_gen_eqv_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1039 tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1041 tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
/* AMASK — architecture feature mask; literal form folded at translate time. */
1047 if (likely(rc != 31)) {
1049 tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
1051 tcg_gen_helper_1_1(helper_amask, cpu_ir[rc], cpu_ir[rb]);
/* CMOVLE / CMOVGT */
1056 gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
1060 gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
/* IMPLVER — implementation version. */
1065 tcg_gen_helper_1_0(helper_load_implver, cpu_ir[rc]);
/* Opcode 0x12 — shift and byte-manipulation instructions (MSKx, EXTx,
   INSx, ZAP/ZAPNOT, SRL/SLL/SRA).  Most are helper-based; shifts mask the
   count to 6 bits as the architecture requires. */
1075 gen_arith3(helper_mskbl, ra, rb, rc, islit, lit);
1079 gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
1083 gen_arith3(helper_insbl, ra, rb, rc, islit, lit);
1087 gen_arith3(helper_mskwl, ra, rb, rc, islit, lit);
1091 gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1095 gen_arith3(helper_inswl, ra, rb, rc, islit, lit);
1099 gen_arith3(helper_mskll, ra, rb, rc, islit, lit);
1103 gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
1107 gen_arith3(helper_insll, ra, rb, rc, islit, lit);
1111 gen_arith3(helper_zap, ra, rb, rc, islit, lit);
1115 gen_arith3(helper_zapnot, ra, rb, rc, islit, lit);
1119 gen_arith3(helper_mskql, ra, rb, rc, islit, lit);
/* SRL — logical shift right, count masked to 0..63. */
1123 if (likely(rc != 31)) {
1126 tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1128 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1129 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1130 tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
1131 tcg_temp_free(shift);
1134 tcg_gen_movi_i64(cpu_ir[rc], 0);
/* EXTQL — NULL extension function means no truncation after the shift. */
1139 gen_ext_l(NULL, ra, rb, rc, islit, lit);
/* SLL */
1143 if (likely(rc != 31)) {
1146 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1148 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1149 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1150 tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
1151 tcg_temp_free(shift);
1154 tcg_gen_movi_i64(cpu_ir[rc], 0);
1159 gen_arith3(helper_insql, ra, rb, rc, islit, lit);
/* SRA — arithmetic shift right. */
1163 if (likely(rc != 31)) {
1166 tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1168 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1169 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1170 tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
1171 tcg_temp_free(shift);
1174 tcg_gen_movi_i64(cpu_ir[rc], 0);
/* High-half mask/insert/extract forms. */
1179 gen_arith3(helper_mskwh, ra, rb, rc, islit, lit);
1183 gen_arith3(helper_inswh, ra, rb, rc, islit, lit);
1187 gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1191 gen_arith3(helper_msklh, ra, rb, rc, islit, lit);
1195 gen_arith3(helper_inslh, ra, rb, rc, islit, lit);
/* NOTE(review): this appears to be EXTLH (longword) yet passes ext16u like
   the EXTWH case above — ext32u looks intended; confirm against fn7. */
1199 gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1203 gen_arith3(helper_mskqh, ra, rb, rc, islit, lit);
1207 gen_arith3(helper_insqh, ra, rb, rc, islit, lit);
/* EXTQH — full quadword, no truncation. */
1211 gen_ext_h(NULL, ra, rb, rc, islit, lit);
/* Opcode 0x13 — multiplies.  MULL sign-extends the 32-bit product. */
1221 if (likely(rc != 31)) {
/* ra == 31: product is zero. */
1223 tcg_gen_movi_i64(cpu_ir[rc], 0);
1226 tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1228 tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1229 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
/* MULQ — 64-bit multiply. */
1235 if (likely(rc != 31)) {
1237 tcg_gen_movi_i64(cpu_ir[rc], 0);
1239 tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1241 tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
/* UMULH and the overflow-trapping multiply forms use helpers. */
1246 gen_arith3(helper_umulh, ra, rb, rc, islit, lit);
1250 gen_arith3(helper_mullv, ra, rb, rc, islit, lit);
1254 gen_arith3(helper_mulqv, ra, rb, rc, islit, lit);
/* Opcode 0x14 — FIX extension: square roots and integer-to-FP moves. */
1261 switch (fpfn) { /* f11 & 0x3F */
/* ITOFS: move integer register bits into an FP register as an S-float. */
1264 if (!(ctx->amask & AMASK_FIX))
1266 if (likely(rc != 31)) {
1268 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
1269 tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1270 tcg_gen_helper_1_1(helper_memory_to_s, cpu_fir[rc], tmp);
/* ra == 31: write +0. */
1273 tcg_gen_movi_i64(cpu_fir[rc], 0);
/* SQRTF / SQRTS */
1278 if (!(ctx->amask & AMASK_FIX))
1280 gen_farith2(&helper_sqrtf, rb, rc);
1284 if (!(ctx->amask & AMASK_FIX))
1286 gen_farith2(&helper_sqrts, rb, rc);
/* ITOFF */
1290 if (!(ctx->amask & AMASK_FIX))
1292 if (likely(rc != 31)) {
1294 TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
1295 tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
1296 tcg_gen_helper_1_1(helper_memory_to_f, cpu_fir[rc], tmp);
1299 tcg_gen_movi_i64(cpu_fir[rc], 0);
/* ITOFT: raw 64-bit move, no conversion needed. */
1304 if (!(ctx->amask & AMASK_FIX))
1306 if (likely(rc != 31)) {
1308 tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
1310 tcg_gen_movi_i64(cpu_fir[rc], 0);
/* SQRTG / SQRTT */
1315 if (!(ctx->amask & AMASK_FIX))
1317 gen_farith2(&helper_sqrtg, rb, rc);
1321 if (!(ctx->amask & AMASK_FIX))
1323 gen_farith2(&helper_sqrtt, rb, rc);
1330 /* VAX floating point */
1331 /* XXX: rounding mode and trap are ignored (!) */
1332 switch (fpfn) { /* f11 & 0x3F */
1335 gen_farith3(&helper_addf, ra, rb, rc);
1339 gen_farith3(&helper_subf, ra, rb, rc);
1343 gen_farith3(&helper_mulf, ra, rb, rc);
1347 gen_farith3(&helper_divf, ra, rb, rc);
1352 gen_farith2(&helper_cvtdg, rb, rc);
1359 gen_farith3(&helper_addg, ra, rb, rc);
1363 gen_farith3(&helper_subg, ra, rb, rc);
1367 gen_farith3(&helper_mulg, ra, rb, rc);
1371 gen_farith3(&helper_divg, ra, rb, rc);
1375 gen_farith3(&helper_cmpgeq, ra, rb, rc);
1379 gen_farith3(&helper_cmpglt, ra, rb, rc);
1383 gen_farith3(&helper_cmpgle, ra, rb, rc);
1387 gen_farith2(&helper_cvtgf, rb, rc);
/* NOTE(review): stray 'ctx' first argument — every other gen_farith2 call
   in this file takes (helper, rb, rc); this looks like a leftover and would
   not compile against that signature.  Confirm and drop 'ctx'. */
1392 gen_farith2(ctx, &helper_cvtgd, rb, rc);
1399 gen_farith2(&helper_cvtgq, rb, rc);
1403 gen_farith2(&helper_cvtqf, rb, rc);
1407 gen_farith2(&helper_cvtqg, rb, rc);
1414 /* IEEE floating-point */
1415 /* XXX: rounding mode and traps are ignored (!) */
1416 switch (fpfn) { /* f11 & 0x3F */
/* ADDS/SUBS/MULS/DIVS — single precision. */
1419 gen_farith3(&helper_adds, ra, rb, rc);
1423 gen_farith3(&helper_subs, ra, rb, rc);
1427 gen_farith3(&helper_muls, ra, rb, rc);
1431 gen_farith3(&helper_divs, ra, rb, rc);
/* ADDT/SUBT/MULT/DIVT — double precision. */
1435 gen_farith3(&helper_addt, ra, rb, rc);
1439 gen_farith3(&helper_subt, ra, rb, rc);
1443 gen_farith3(&helper_mult, ra, rb, rc);
1447 gen_farith3(&helper_divt, ra, rb, rc);
/* CMPTxx comparisons. */
1451 gen_farith3(&helper_cmptun, ra, rb, rc);
1455 gen_farith3(&helper_cmpteq, ra, rb, rc);
1459 gen_farith3(&helper_cmptlt, ra, rb, rc);
1463 gen_farith3(&helper_cmptle, ra, rb, rc);
1466 /* XXX: incorrect */
/* CVTST shares an encoding slot with CVTTS; disambiguate on full fn11. */
1467 if (fn11 == 0x2AC || fn11 == 0x6AC) {
1469 gen_farith2(&helper_cvtst, rb, rc);
1472 gen_farith2(&helper_cvtts, rb, rc);
1477 gen_farith2(&helper_cvttq, rb, rc);
1481 gen_farith2(&helper_cvtqs, rb, rc);
1485 gen_farith2(&helper_cvtqt, rb, rc);
/* Opcode 0x17 — FP miscellaneous: CVTLQ, copy-sign family, FPCR access,
   FP conditional moves, CVTQL variants. */
1495 gen_farith2(&helper_cvtlq, rb, rc);
/* CPYS with ra == rb is the canonical FMOV/FNOP idiom. */
1498 if (likely(rc != 31)) {
1501 tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
1504 gen_farith3(&helper_cpys, ra, rb, rc);
1509 gen_farith3(&helper_cpysn, ra, rb, rc);
1513 gen_farith3(&helper_cpyse, ra, rb, rc);
/* MT_FPCR — write FP control register (0 when ra is f31). */
1517 if (likely(ra != 31))
1518 tcg_gen_helper_0_1(helper_store_fpcr, cpu_fir[ra]);
1520 TCGv tmp = tcg_const_i64(0);
1521 tcg_gen_helper_0_1(helper_store_fpcr, tmp);
/* MF_FPCR — read FP control register into fra. */
1527 if (likely(ra != 31))
1528 tcg_gen_helper_1_0(helper_load_fpcr, cpu_fir[ra]);
/* FCMOVxx family. */
1532 gen_fcmov(&helper_cmpfeq, ra, rb, rc);
1536 gen_fcmov(&helper_cmpfne, ra, rb, rc);
1540 gen_fcmov(&helper_cmpflt, ra, rb, rc);
1544 gen_fcmov(&helper_cmpfge, ra, rb, rc);
1548 gen_fcmov(&helper_cmpfle, ra, rb, rc);
1552 gen_fcmov(&helper_cmpfgt, ra, rb, rc);
/* CVTQL and its overflow/software variants. */
1556 gen_farith2(&helper_cvtql, rb, rc);
1560 gen_farith2(&helper_cvtqlv, rb, rc);
1564 gen_farith2(&helper_cvtqlsv, rb, rc);
/* Opcode 0x18 — miscellaneous: memory barriers (no-ops here), RPCC,
   RC/RS interrupt-flag ops, cache hints. */
1571 switch ((uint16_t)disp16) {
1574 /* No-op. Just exit from the current tb */
1579 /* No-op. Just exit from the current tb */
/* RPCC — read the process cycle counter. */
1601 tcg_gen_helper_1_0(helper_load_pcc, cpu_ir[ra]);
/* RC — read-and-clear the interrupt flag. */
1606 tcg_gen_helper_1_0(helper_rc, cpu_ir[ra]);
1610 /* XXX: TODO: evict tb cache at address rb */
/* RS — read-and-set the interrupt flag. */
1620 tcg_gen_helper_1_0(helper_rs, cpu_ir[ra]);
1631 /* HW_MFPR (PALcode) */
1632 #if defined (CONFIG_USER_ONLY)
/* Read an internal processor register selected by the insn's low byte. */
1638 TCGv tmp = tcg_const_i32(insn & 0xFF);
1639 tcg_gen_helper_1_2(helper_mfpr, cpu_ir[ra], tmp, cpu_ir[ra]);
/* JMP/JSR/RET/JSR_COROUTINE: target = rb & ~3, return address into ra. */
1646 tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
1648 tcg_gen_movi_i64(cpu_pc, 0);
1650 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1651 /* Those four jumps only differ by the branch prediction hint */
1669 /* HW_LD (PALcode) */
1670 #if defined (CONFIG_USER_ONLY)
/* PALcode load: insn bits 12..15 select physical/virtual, lock, PTE and
   alternate-mode variants. */
1676 TCGv addr = tcg_temp_new(TCG_TYPE_I64);
1678 tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
1680 tcg_gen_movi_i64(addr, disp12);
1681 switch ((insn >> 12) & 0xF) {
1683 /* Longword physical access */
1684 tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
1687 /* Quadword physical access */
1688 tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
1691 /* Longword physical access with lock */
1692 tcg_gen_helper_0_2(helper_ldl_l_raw, cpu_ir[ra], addr);
1695 /* Quadword physical access with lock */
1696 tcg_gen_helper_0_2(helper_ldq_l_raw, cpu_ir[ra], addr);
1699 /* Longword virtual PTE fetch */
1700 tcg_gen_helper_0_2(helper_ldl_kernel, cpu_ir[ra], addr);
1703 /* Quadword virtual PTE fetch */
1704 tcg_gen_helper_0_2(helper_ldq_kernel, cpu_ir[ra], addr);
1707 /* Invalid */
/* NOTE(review): "incpu_ir[ra]id_opc" is search-and-replace damage of the
   label "invalid_opc" (a blanket Tn -> cpu_ir[rn] substitution hit the
   word "invalid").  As written this is not valid C — the goto label must
   be restored to invalid_opc. */
1708 goto incpu_ir[ra]id_opc;
1710 /* Invalid */
1711 goto incpu_ir[ra]id_opc;
1713 /* Longword virtual access */
1714 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
1715 tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
1718 /* Quadword virtual access */
1719 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
1720 tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
1723 /* Longword virtual access with protection check */
1724 tcg_gen_qemu_ld32s(cpu_ir[ra], addr, ctx->flags);
1727 /* Quadword virtual access with protection check */
1728 tcg_gen_qemu_ld64(cpu_ir[ra], addr, ctx->flags);
1731 /* Longword virtual access with alternate access mode */
1732 tcg_gen_helper_0_0(helper_set_alt_mode);
1733 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
1734 tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
1735 tcg_gen_helper_0_0(helper_restore_mode);
1738 /* Quadword virtual access with alternate access mode */
1739 tcg_gen_helper_0_0(helper_set_alt_mode);
1740 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
1741 tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
1742 tcg_gen_helper_0_0(helper_restore_mode);
1745 /* Longword virtual access with alternate access mode and
1748 tcg_gen_helper_0_0(helper_set_alt_mode);
1749 tcg_gen_helper_0_2(helper_ldl_data, cpu_ir[ra], addr);
1750 tcg_gen_helper_0_0(helper_restore_mode);
1753 /* Quadword virtual access with alternate access mode and
1756 tcg_gen_helper_0_0(helper_set_alt_mode);
1757 tcg_gen_helper_0_2(helper_ldq_data, cpu_ir[ra], addr);
1758 tcg_gen_helper_0_0(helper_restore_mode);
1761 tcg_temp_free(addr);
/* Opcode 0x1C — BWX/CIX/MVI/FIX extension instructions. */
/* SEXTB (BWX). */
1769 if (!(ctx->amask & AMASK_BWX))
1771 if (likely(rc != 31)) {
1773 tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
1775 tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
/* SEXTW (BWX). */
1780 if (!(ctx->amask & AMASK_BWX))
1782 if (likely(rc != 31)) {
1784 tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
1786 tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
/* CTPOP (CIX) — literal form folded at translation time. */
1791 if (!(ctx->amask & AMASK_CIX))
1793 if (likely(rc != 31)) {
1795 tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
1797 tcg_gen_helper_1_1(helper_ctpop, cpu_ir[rc], cpu_ir[rb]);
/* PERR (MVI) — body not visible in this extract. */
1802 if (!(ctx->amask & AMASK_MVI))
/* CTLZ (CIX). */
1809 if (!(ctx->amask & AMASK_CIX))
1811 if (likely(rc != 31)) {
1813 tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
1815 tcg_gen_helper_1_1(helper_ctlz, cpu_ir[rc], cpu_ir[rb]);
/* CTTZ (CIX). */
1820 if (!(ctx->amask & AMASK_CIX))
1822 if (likely(rc != 31)) {
1824 tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
1826 tcg_gen_helper_1_1(helper_cttz, cpu_ir[rc], cpu_ir[rb]);
/* MVI pack/unpack/min/max group — bodies not visible in this extract. */
1831 if (!(ctx->amask & AMASK_MVI))
1838 if (!(ctx->amask & AMASK_MVI))
1845 if (!(ctx->amask & AMASK_MVI))
1852 if (!(ctx->amask & AMASK_MVI))
1859 if (!(ctx->amask & AMASK_MVI))
1866 if (!(ctx->amask & AMASK_MVI))
1873 if (!(ctx->amask & AMASK_MVI))
1880 if (!(ctx->amask & AMASK_MVI))
1887 if (!(ctx->amask & AMASK_MVI))
1894 if (!(ctx->amask & AMASK_MVI))
1901 if (!(ctx->amask & AMASK_MVI))
1908 if (!(ctx->amask & AMASK_MVI))
/* FTOIT (FIX): raw FP-to-integer register move. */
1915 if (!(ctx->amask & AMASK_FIX))
1917 if (likely(rc != 31)) {
1919 tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
1921 tcg_gen_movi_i64(cpu_ir[rc], 0);
/* FTOIS (FIX): S-float register format back to 32-bit memory format,
   sign-extended into the integer register. */
1926 if (!(ctx->amask & AMASK_FIX))
1929 TCGv tmp1 = tcg_temp_new(TCG_TYPE_I32);
1931 tcg_gen_helper_1_1(helper_s_to_memory, tmp1, cpu_fir[ra]);
1933 TCGv tmp2 = tcg_const_i64(0);
1934 tcg_gen_helper_1_1(helper_s_to_memory, tmp1, tmp2);
1935 tcg_temp_free(tmp2);
1937 tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
1938 tcg_temp_free(tmp1);
1946 /* HW_MTPR (PALcode) */
1947 #if defined (CONFIG_USER_ONLY)
/* Write internal processor register (insn & 0xFF) from ra (or 0 for $31). */
1953 TCGv tmp1 = tcg_const_i32(insn & 0xFF);
/* NOTE(review): bare 'tcg_gen_helper' — every other helper call here uses
   the arity-suffixed tcg_gen_helper_N_M wrappers; this looks like it should
   be tcg_gen_helper_0_2.  Confirm against the TCG API in use. */
1955 tcg_gen_helper(helper_mtpr, tmp1, cpu_ir[ra]);
1957 TCGv tmp2 = tcg_const_i64(0);
1958 tcg_gen_helper(helper_mtpr, tmp1, tmp2);
1959 tcg_temp_free(tmp2);
1961 tcg_temp_free(tmp1);
1967 /* HW_REI (PALcode) */
1968 #if defined (CONFIG_USER_ONLY)
/* Return from PALmode; the HW_RET form adds a sign-extended 13-bit
   displacement (bits 0..12 of the insn) to rb. */
1975 tcg_gen_helper_0_0(helper_hw_rei);
1980 tmp = tcg_temp_new(TCG_TYPE_I64);
1981 tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
1983 tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
1984 tcg_gen_helper_0_1(helper_hw_ret, tmp);
1991 /* HW_ST (PALcode) */
1992 #if defined (CONFIG_USER_ONLY)
/* PALcode store: mirror of HW_LD — insn bits 12..15 select the variant. */
1999 addr = tcg_temp_new(TCG_TYPE_I64);
2001 tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
2003 tcg_gen_movi_i64(addr, disp12);
/* ra == 31: store constant zero. */
2007 val = tcg_temp_new(TCG_TYPE_I64);
2008 tcg_gen_movi_i64(val, 0);
2010 switch ((insn >> 12) & 0xF) {
2012 /* Longword physical access */
2013 tcg_gen_helper_0_2(helper_stl_raw, val, addr);
2016 /* Quadword physical access */
2017 tcg_gen_helper_0_2(helper_stq_raw, val, addr);
2020 /* Longword physical access with lock */
2021 tcg_gen_helper_1_2(helper_stl_c_raw, val, val, addr);
2024 /* Quadword physical access with lock */
2025 tcg_gen_helper_1_2(helper_stq_c_raw, val, val, addr);
2028 /* Longword virtual access */
2029 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
2030 tcg_gen_helper_0_2(helper_stl_raw, val, addr);
2033 /* Quadword virtual access */
2034 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
2035 tcg_gen_helper_0_2(helper_stq_raw, val, addr);
2056 /* Longword virtual access with alternate access mode */
2057 tcg_gen_helper_0_0(helper_set_alt_mode);
2058 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
2059 tcg_gen_helper_0_2(helper_stl_raw, val, addr);
2060 tcg_gen_helper_0_0(helper_restore_mode);
2063 /* Quadword virtual access with alternate access mode */
2064 tcg_gen_helper_0_0(helper_set_alt_mode);
2065 tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
/* NOTE(review): this quadword path calls helper_stl_raw (32-bit) — the
   longword case above is identical; helper_stq_raw looks intended here. */
2066 tcg_gen_helper_0_2(helper_stl_raw, val, addr);
2067 tcg_gen_helper_0_0(helper_restore_mode);
2078 tcg_temp_free(addr);
2085 gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
2089 gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
2093 gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
2097 gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
2101 gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0, 0);
2105 gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0, 0);
2109 gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0, 0);
2113 gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0, 0);
2117 gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
2121 gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
2125 gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
2129 gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
2133 gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0, 0);
2137 gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0, 0);
2141 gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0, 1);
2145 gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0, 1);
2150 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2151 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2156 gen_fbcond(ctx, &helper_cmpfeq, ra, disp16);
2161 gen_fbcond(ctx, &helper_cmpflt, ra, disp16);
2166 gen_fbcond(ctx, &helper_cmpfle, ra, disp16);
2172 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2173 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2178 gen_fbcond(ctx, &helper_cmpfne, ra, disp16);
2183 gen_fbcond(ctx, &helper_cmpfge, ra, disp16);
2188 gen_fbcond(ctx, &helper_cmpfgt, ra, disp16);
2193 gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 1);
2198 gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 0);
2203 gen_bcond(ctx, TCG_COND_LT, ra, disp16, 0);
2208 gen_bcond(ctx, TCG_COND_LE, ra, disp16, 0);
2213 gen_bcond(ctx, TCG_COND_NE, ra, disp16, 1);
2218 gen_bcond(ctx, TCG_COND_NE, ra, disp16, 0);
2223 gen_bcond(ctx, TCG_COND_GE, ra, disp16, 0);
2228 gen_bcond(ctx, TCG_COND_GT, ra, disp16, 0);
/* Main translation loop: decode Alpha instructions starting at the TB's
 * PC and emit TCG ops until the translation block must end.  The third
 * (search_pc) parameter -- its declaration is outside this view --
 * selects whether the per-opcode PC/icount tables are filled in so a
 * guest PC can later be recovered from a host fault address.
 * NOTE(review): sampled extract -- several declarations and braces
 * between the visible lines are missing from this view. */
2240 static always_inline void gen_intermediate_code_internal (CPUState *env,
2241 TranslationBlock *tb,
2244 #if defined ALPHA_DEBUG_DISAS
2245 static int insn_count;
2247 DisasContext ctx, *ctxp = &ctx;
2248 target_ulong pc_start;
2250 uint16_t *gen_opc_end;
2257 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
/* Seed the disassembly context from CPU state: amask CPU features,
 * memory index from PS bits <4:3>, PAL mode from EXC_ADDR bit 0. */
2259 ctx.amask = env->amask;
2260 #if defined (CONFIG_USER_ONLY)
2263 ctx.mem_idx = ((env->ps >> 3) & 3);
2264 ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
/* Instruction budget for this TB (icount); 0 means "no limit", mapped
 * to the mask maximum. */
2267 max_insns = tb->cflags & CF_COUNT_MASK;
2269 max_insns = CF_COUNT_MASK;
/* Translate until translate_one() reports a TB-ending condition
 * (ret != 0) or one of the explicit loop-exit checks below fires. */
2272 for (ret = 0; ret == 0;) {
/* Stop and raise EXCP_DEBUG at a debugger breakpoint on this PC. */
2273 if (env->nb_breakpoints > 0) {
2274 for(j = 0; j < env->nb_breakpoints; j++) {
2275 if (env->breakpoints[j] == ctx.pc) {
2276 gen_excp(&ctx, EXCP_DEBUG, 0);
/* search_pc bookkeeping: record opcode-buffer index -> (guest pc,
 * insn count) so retranslation can recover the faulting PC. */
2282 j = gen_opc_ptr - gen_opc_buf;
2286 gen_opc_instr_start[lj++] = 0;
2287 gen_opc_pc[lj] = ctx.pc;
2288 gen_opc_instr_start[lj] = 1;
2289 gen_opc_icount[lj] = num_insns;
/* If the TB's last insn may perform I/O, open the io region first. */
2292 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
2294 #if defined ALPHA_DEBUG_DISAS
2296 if (logfile != NULL) {
2297 fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
2298 ctx.pc, ctx.mem_idx);
/* Fetch the 32-bit instruction word at the current guest PC. */
2301 insn = ldl_code(ctx.pc);
2302 #if defined ALPHA_DEBUG_DISAS
2304 if (logfile != NULL) {
2305 fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
2310 ret = translate_one(ctxp, insn);
2313 /* if we reach a page boundary or are single stepping, stop
/* Also stop when the instruction budget is exhausted. */
2316 if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
2317 (env->singlestep_enabled) ||
2318 num_insns >= max_insns) {
2321 #if defined (DO_SINGLE_STEP)
/* NOTE(review): ret values 1 and 3 appear to mean translate_one
 * already wrote cpu_pc (branch/exception); otherwise store the
 * fall-through PC before closing the TB -- confirm against the
 * translate_one() return conventions. */
2325 if (ret != 1 && ret != 3) {
2326 tcg_gen_movi_i64(cpu_pc, ctx.pc);
2328 #if defined (DO_TB_FLUSH)
2329 tcg_gen_helper_0_0(helper_tb_flush);
2331 if (tb->cflags & CF_LAST_IO)
2333 /* Generate the return instruction */
2335 gen_icount_end(tb, num_insns);
2336 *gen_opc_ptr = INDEX_op_end;
/* search_pc: pad the instr_start table out to the final opcode. */
2338 j = gen_opc_ptr - gen_opc_buf;
2341 gen_opc_instr_start[lj++] = 0;
/* Record the TB's guest-code size and executed-insn count. */
2343 tb->size = ctx.pc - pc_start;
2344 tb->icount = num_insns;
/* Optional debug dump: CPU state and disassembly of the guest code. */
2346 #if defined ALPHA_DEBUG_DISAS
2347 if (loglevel & CPU_LOG_TB_CPU) {
2348 cpu_dump_state(env, logfile, fprintf, 0);
2350 if (loglevel & CPU_LOG_TB_IN_ASM) {
2351 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
2352 target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
2353 fprintf(logfile, "\n");
/* Public entry point: translate one TB without search-PC bookkeeping
 * (third argument 0). */
2358 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
2360 gen_intermediate_code_internal(env, tb, 0);
/* Variant used when recovering a guest PC after a fault: same
 * translation, but with search-PC bookkeeping enabled (third arg 1)
 * so gen_opc_pc[] / gen_opc_instr_start[] get filled in. */
2363 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
2365 gen_intermediate_code_internal(env, tb, 1);
/* Allocate and initialise a CPUAlphaState for the given CPU model.
 * NOTE(review): sampled extract -- the allocation NULL-check, the
 * system-emulation PC/PS setup and the final `return env` are not
 * visible in this view; the cpu_model argument is not consulted in
 * the visible lines either. */
2368 CPUAlphaState * cpu_alpha_init (const char *cpu_model)
2373 env = qemu_mallocz(sizeof(CPUAlphaState));
/* One-time TCG global/helper registration (guarded inside). */
2377 alpha_translate_init();
2379 /* XXX: should not be hardcoded */
2380 env->implver = IMPLVER_2106x;
2382 #if defined (CONFIG_USER_ONLY)
2386 /* Initialize IPR */
/* Base of the HW privileged context block (PCB); the commented-out
 * loads below would fetch the initial stack pointers and PTBR from it. */
2387 hwpcb = env->ipr[IPR_PCBB];
2388 env->ipr[IPR_ASN] = 0;
2389 env->ipr[IPR_ASTEN] = 0;
2390 env->ipr[IPR_ASTSR] = 0;
2391 env->ipr[IPR_DATFX] = 0;
2393 // env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
2394 // env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
2395 // env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
2396 // env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
2397 env->ipr[IPR_FEN] = 0;
/* Boot at the highest interrupt priority level (31). */
2398 env->ipr[IPR_IPL] = 31;
2399 env->ipr[IPR_MCES] = 0;
2400 env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
2401 // env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
2402 env->ipr[IPR_SISR] = 0;
2403 env->ipr[IPR_VIRBND] = -1ULL;
/* Restore the guest PC for the opcode at pc_pos, using the table
 * filled in during gen_intermediate_code_pc().  searched_pc and puc
 * are unused in the visible line. */
2408 void gen_pc_load(CPUState *env, TranslationBlock *tb,
2409 unsigned long searched_pc, int pc_pos, void *puc)
2411 env->pc = gen_opc_pc[pc_pos];