4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
25 NPC/PC static optimisations (use JUMP_TB when possible)
27 Privileged instructions
28 Coprocessor instructions
29 Optimize synthetic instructions
30 Optional alignment and privileged instruction check
/* Sentinel values stored in dc->pc / dc->npc in place of a real guest
 * address: DYNAMIC_PC means the value is only known at run time,
 * JUMP_PC means it is one of the two candidates in dc->jump_pc[],
 * selected at run time by the condition result held in T2. */
45 #define DYNAMIC_PC 1 /* dynamic pc value */
46 #define JUMP_PC 2 /* dynamic pc value which takes only two values
47 according to jump_pc[T2] */
/* Per-translation-block disassembly state.
 * NOTE(review): this extract is missing lines here — the struct's
 * closing brace and several members (e.g. is_br/mem_idx used below)
 * are not visible; documented only from what is shown. */
49 typedef struct DisasContext {
50 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
51 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
52 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
55 struct TranslationBlock *tb; /* TB being translated */
/* Write cursors into the micro-op and parameter buffers, reset at the
 * start of each TB by gen_intermediate_code_internal(). */
58 static uint16_t *gen_opc_ptr;
59 static uint32_t *gen_opparam_ptr;
/* Expand each micro-op definition into an INDEX_op_* enumerator
 * (classic dyngen pattern; the surrounding enum is not visible here). */
64 #define DEF(s,n,copy_size) INDEX_op_ ## s,
/* Extract the bit field [FROM, TO] from the 32-bit word X, using the
 * SPARC manual's big-endian bit numbering (bit 0 is the MSB).  The
 * mask is built from an unsigned constant so that a field up to 31
 * bits wide never left-shifts a 1 into the sign bit of a signed int
 * (which would be undefined behavior); extra parentheses make the
 * shift/mask grouping explicit instead of relying on precedence. */
#define GET_FIELD(X, FROM, TO) \
    (((X) >> (31 - (TO))) & ((1U << ((TO) - (FROM) + 1)) - 1))

/* Bit 13 of a SPARC format-3 instruction word selects the immediate
 * form of the second operand (relies on a local `insn` in scope). */
#define IS_IMM (insn & (1 << 13))
77 static void disas_sparc_insn(DisasContext * dc);
/* Dispatch tables mapping a SPARC register number (indexed by the
 * T0/T1/T2 temporary slot, then by register 0..31) to the generated
 * move micro-op, plus immediate-load ops per temporary.
 * NOTE(review): the table initializers are elided in this extract. */
79 static GenOpFunc *gen_op_movl_TN_reg[2][32] = {
150 static GenOpFunc *gen_op_movl_reg_TN[3][32] = {
255 static GenOpFunc1 *gen_op_movl_TN_im[3] = {
/* GEN32(func, NAME) builds a 32-entry table NAME##_table of the
 * per-register micro-ops NAME##0 .. NAME##31 and a static inline
 * dispatcher func(n) that invokes entry n.
 * NOTE(review): the table's closing "};" and the dispatcher's braces
 * are missing from this extract of the file. */
261 #define GEN32(func, NAME) \
262 static GenOpFunc *NAME ## _table [32] = { \
263 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
264 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
265 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
266 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
267 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
268 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
269 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
270 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
272 static inline void func(int n) \
274 NAME ## _table[n](); \
277 /* floating point registers moves */
/* Single-precision (FT0/FT1/FT2) and double-precision (DT0/DT1/DT2)
 * FPU register load/store dispatchers, one 32-way table + inline
 * dispatcher per temporary, expanded from GEN32 above. */
278 GEN32(gen_op_load_fpr_FT0, gen_op_load_fpr_FT0_fprf);
279 GEN32(gen_op_load_fpr_FT1, gen_op_load_fpr_FT1_fprf);
280 GEN32(gen_op_load_fpr_FT2, gen_op_load_fpr_FT2_fprf);
281 GEN32(gen_op_store_FT0_fpr, gen_op_store_FT0_fpr_fprf);
282 GEN32(gen_op_store_FT1_fpr, gen_op_store_FT1_fpr_fprf);
283 GEN32(gen_op_store_FT2_fpr, gen_op_store_FT2_fpr_fprf);
285 GEN32(gen_op_load_fpr_DT0, gen_op_load_fpr_DT0_fprf);
286 GEN32(gen_op_load_fpr_DT1, gen_op_load_fpr_DT1_fprf);
287 GEN32(gen_op_load_fpr_DT2, gen_op_load_fpr_DT2_fprf);
288 GEN32(gen_op_store_DT0_fpr, gen_op_store_DT0_fpr_fprf);
289 GEN32(gen_op_store_DT1_fpr, gen_op_store_DT1_fpr_fprf);
290 GEN32(gen_op_store_DT2_fpr, gen_op_store_DT2_fpr_fprf);
/* Guest memory access helpers.  In user-only builds every access uses
 * the _raw op and supervisor() is statically false; in system builds
 * dc->mem_idx selects the _user/_kernel op, and the *a
 * (alternate-space) generator dispatches on the instruction's ASI
 * field — ASI 10/11 force user/supervisor access, 0x20..0x2f pass
 * through the MMU untranslated.
 * NOTE(review): the #else line, the switch/case braces, break
 * statements, and the ld/st selection logic are missing from this
 * extract; the visible lines are only a skeleton. */
292 #if defined(CONFIG_USER_ONLY)
293 #define gen_op_ldst(name) gen_op_##name##_raw()
294 #define OP_LD_TABLE(width)
295 #define supervisor(dc) 0
297 #define gen_op_ldst(name) (*gen_op_##name[dc->mem_idx])()
298 #define OP_LD_TABLE(width) \
299 static GenOpFunc *gen_op_##width[] = { \
300 &gen_op_##width##_user, \
301 &gen_op_##width##_kernel, \
304 static void gen_op_##width##a(int insn, int is_ld, int size, int sign) \
308 asi = GET_FIELD(insn, 19, 26); \
310 case 10: /* User data access */ \
311 gen_op_##width##_user(); \
313 case 11: /* Supervisor data access */ \
314 gen_op_##width##_kernel(); \
316 case 0x20 ... 0x2f: /* MMU passthrough */ \
318 gen_op_ld_asi(asi, size, sign); \
320 gen_op_st_asi(asi, size, sign); \
324 gen_op_ld_asi(asi, size, sign); \
326 gen_op_st_asi(asi, size, sign); \
331 #define supervisor(dc) (dc->mem_idx == 1)
/* Load immediate `imm` into temporary T<reg> via the dispatch table,
 * plus convenience wrappers for T1 and T0.
 * NOTE(review): function braces are missing from this extract. */
351 static inline void gen_movl_imm_TN(int reg, int imm)
353 gen_op_movl_TN_im[reg] (imm);
356 static inline void gen_movl_imm_T1(int val)
358 gen_movl_imm_TN(1, val);
361 static inline void gen_movl_imm_T0(int val)
363 gen_movl_imm_TN(0, val);
/* Copy SPARC register `reg` into temporary T<t>.  The two visible
 * statements are the two arms of a register-vs-%g0 choice: %g0 always
 * reads as constant zero, so it bypasses the dispatch table.
 * NOTE(review): the if/else and braces around them are missing from
 * this extract. */
366 static inline void gen_movl_reg_TN(int reg, int t)
369 gen_op_movl_reg_TN[t][reg] ();
371 gen_movl_imm_TN(t, 0);
/* Convenience wrappers for T0/T1/T2. */
374 static inline void gen_movl_reg_T0(int reg)
376 gen_movl_reg_TN(reg, 0);
379 static inline void gen_movl_reg_T1(int reg)
381 gen_movl_reg_TN(reg, 1);
384 static inline void gen_movl_reg_T2(int reg)
386 gen_movl_reg_TN(reg, 2);
/* Store temporary T<t> back into SPARC register `reg` (writes to %g0
 * are presumably discarded by the guard elided here — TODO confirm;
 * braces and the guard itself are missing from this extract). */
389 static inline void gen_movl_TN_reg(int reg, int t)
392 gen_op_movl_TN_reg[t][reg] ();
/* Convenience wrappers for T0 and T1. */
395 static inline void gen_movl_T0_reg(int reg)
397 gen_movl_TN_reg(reg, 0);
400 static inline void gen_movl_T1_reg(int reg)
402 gen_movl_TN_reg(reg, 1);
405 /* call this function before using T2 as it may have been set for a jump */
/* If npc is the JUMP_PC sentinel, materialize the pending two-way
 * branch (consuming the condition in T2) and downgrade npc to
 * DYNAMIC_PC so T2 is free for reuse. */
406 static inline void flush_T2(DisasContext * dc)
408 if (dc->npc == JUMP_PC) {
409 gen_op_generic_branch(dc->jump_pc[0], dc->jump_pc[1]);
410 dc->npc = DYNAMIC_PC;
/* Flush the tracked npc into the CPU state: resolve a pending JUMP_PC
 * two-way branch, or emit an immediate npc store when it is static.
 * A npc already equal to DYNAMIC_PC needs no code — the run-time
 * value is authoritative. */
414 static inline void save_npc(DisasContext * dc)
416 if (dc->npc == JUMP_PC) {
417 gen_op_generic_branch(dc->jump_pc[0], dc->jump_pc[1]);
418 dc->npc = DYNAMIC_PC;
419 } else if (dc->npc != DYNAMIC_PC) {
420 gen_op_movl_npc_im(dc->npc);
/* Flush dc->pc (and, presumably via save_npc in the elided lines,
 * npc — TODO confirm) to the CPU state before emitting an op that can
 * raise an exception.  NOTE(review): body truncated in this extract. */
424 static inline void save_state(DisasContext * dc)
426 gen_op_jmp_im(dc->pc);
/* Emit the micro-op that evaluates integer condition code `cond`
 * (SPARC icc encoding) into a temporary for a conditional branch.
 * NOTE(review): the body is entirely elided in this extract. */
430 static void gen_cond(int cond)
/* Same for the floating-point condition codes (fcc). */
479 static void gen_fcond(int cond)
/* Translate an integer conditional branch (Bicc).  `offset` is the
 * pre-scaled displacement, `a` the annul bit.  Static cases (branch
 * never/always) update pc/npc directly; the conditional case records
 * the two candidate targets in jump_pc[] and leaves npc = JUMP_PC.
 * NOTE(review): many interior lines (cond evaluation, annul handling,
 * braces) are missing from this extract. */
528 static void do_branch(DisasContext * dc, int32_t offset, uint32_t insn)
530 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
531 target_ulong target = dc->pc + offset;
534 /* unconditional not taken */
536 dc->pc = dc->npc + 4;
537 dc->npc = dc->pc + 4;
540 dc->npc = dc->pc + 4;
542 } else if (cond == 0x8) {
543 /* unconditional taken */
546 dc->npc = dc->pc + 4;
555 gen_op_branch_a((long)dc->tb, target, dc->npc);
559 dc->jump_pc[0] = target;
560 dc->jump_pc[1] = dc->npc + 4;
/* Translate a floating-point conditional branch (FBfcc); mirrors
 * do_branch() above but conditions come from the FPU condition codes.
 * NOTE(review): interior lines are missing from this extract, as in
 * do_branch. */
566 static void do_fbranch(DisasContext * dc, int32_t offset, uint32_t insn)
568 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
569 target_ulong target = dc->pc + offset;
572 /* unconditional not taken */
574 dc->pc = dc->npc + 4;
575 dc->npc = dc->pc + 4;
578 dc->npc = dc->pc + 4;
580 } else if (cond == 0x8) {
581 /* unconditional taken */
584 dc->npc = dc->pc + 4;
593 gen_op_branch_a((long)dc->tb, target, dc->npc);
597 dc->jump_pc[0] = target;
598 dc->npc + 4 is recorded as fall-through; see jump_pc[1] below
/* Extract field [a,b] of x with GET_FIELD and sign-extend it from its
 * natural width ((b) - (a) + 1 bits) to a full int. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)

/* Sign-extend the low `len` bits of x to a full 32-bit int.
 *
 * The value is shifted left so its sign bit lands in bit 31, then
 * shifted back down arithmetically.  Without the `len = 32 - len`
 * adjustment the two shifts cancel and no extension happens — that
 * adjustment was missing here.  The left shift is performed on an
 * unsigned value to avoid signed-overflow UB; the arithmetic right
 * shift of a negative int is implementation-defined but behaves as
 * expected on every compiler this translator targets. */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return ((int)((unsigned int)x << len)) >> len;
}
/* Decode and translate one SPARC instruction at dc->pc into micro-ops,
 * updating dc->pc/dc->npc (possibly to the DYNAMIC_PC/JUMP_PC
 * sentinels for computed or conditional control flow).
 * NOTE(review): this extract is heavily sampled — switch statements,
 * braces, breaks and many case bodies are missing throughout; the
 * comments below describe only what the visible lines show. */
612 static void disas_sparc_insn(DisasContext * dc)
614 unsigned int insn, opc, rs1, rs2, rd;
616 insn = ldl_code(dc->pc);
/* opc = insn<31:30> selects the major format; rd = insn<29:25>. */
617 opc = GET_FIELD(insn, 0, 1);
619 rd = GET_FIELD(insn, 2, 6);
/* Format 2: branches and SETHI. */
621 case 0: /* branches/sethi */
623 unsigned int xop = GET_FIELD(insn, 7, 9);
625 target = GET_FIELD(insn, 10, 31);
627 case 0x0: /* UNIMPL */
628 case 0x1: /* V9 BPcc */
629 case 0x3: /* V9 BPr */
630 case 0x5: /* V9 FBPcc */
/* Bicc: 22-bit word displacement, sign-extended. */
636 target = sign_extend(target, 22);
637 do_branch(dc, target, insn);
640 case 0x6: /* FBN+x */
642 #if !defined(CONFIG_USER_ONLY)
643 gen_op_trap_ifnofpu();
646 target = sign_extend(target, 22);
647 do_fbranch(dc, target, insn);
650 case 0x4: /* SETHI */
655 gen_movl_imm_T0(target << 10);
/* Format 1: CALL — 30-bit word displacement; old PC goes to %o7. */
667 target_long target = GET_FIELDs(insn, 2, 31) << 2;
669 gen_op_movl_T0_im(dc->pc);
/* Format 3, op=2: arithmetic, logical, FPU and control ops. */
676 case 2: /* FPU & Logical Operations */
678 unsigned int xop = GET_FIELD(insn, 7, 12);
679 if (xop == 0x3a) { /* generate trap */
681 rs1 = GET_FIELD(insn, 13, 17);
682 gen_movl_reg_T0(rs1);
684 rs2 = GET_FIELD(insn, 25, 31);
688 gen_movl_imm_T1(rs2);
694 rs2 = GET_FIELD(insn, 27, 31);
698 gen_movl_reg_T1(rs2);
706 cond = GET_FIELD(insn, 3, 6);
711 } else if (cond != 0) {
/* xop 0x28: RDY and friends (state-register reads). */
715 } else if (xop == 0x28) {
716 rs1 = GET_FIELD(insn, 13, 17);
722 case 15: /* stbar / V9 membar */
723 break; /* no effect? */
725 case 0x2: /* V9 rdccr */
726 case 0x3: /* V9 rdasi */
727 case 0x4: /* V9 rdtick */
728 case 0x5: /* V9 rdpc */
729 case 0x6: /* V9 rdfprs */
732 #if !defined(CONFIG_USER_ONLY)
/* xop 0x29-0x2b: privileged state reads (rdpsr/rdwim/rdtbr). */
733 } else if (xop == 0x29) {
739 } else if (xop == 0x2a) {
745 } else if (xop == 0x2b) {
/* xop 0x34: FPop1 — data-movement and arithmetic FPU operations. */
752 } else if (xop == 0x34) { /* FPU Operations */
753 #if !defined(CONFIG_USER_ONLY)
754 gen_op_trap_ifnofpu();
756 rs1 = GET_FIELD(insn, 13, 17);
757 rs2 = GET_FIELD(insn, 27, 31);
758 xop = GET_FIELD(insn, 18, 26);
760 case 0x1: /* fmovs */
761 gen_op_load_fpr_FT0(rs2);
762 gen_op_store_FT0_fpr(rd);
764 case 0x5: /* fnegs */
765 gen_op_load_fpr_FT1(rs2);
767 gen_op_store_FT0_fpr(rd);
769 case 0x9: /* fabss */
770 gen_op_load_fpr_FT1(rs2);
772 gen_op_store_FT0_fpr(rd);
774 case 0x29: /* fsqrts */
775 gen_op_load_fpr_FT1(rs2);
777 gen_op_store_FT0_fpr(rd);
779 case 0x2a: /* fsqrtd */
780 gen_op_load_fpr_DT1(rs2);
782 gen_op_store_DT0_fpr(rd);
784 case 0x2b: /* fsqrtq */
/* Two-operand FP arithmetic: load rs1/rs2, op (elided), store rd. */
787 gen_op_load_fpr_FT0(rs1);
788 gen_op_load_fpr_FT1(rs2);
790 gen_op_store_FT0_fpr(rd);
793 gen_op_load_fpr_DT0(rs1);
794 gen_op_load_fpr_DT1(rs2);
796 gen_op_store_DT0_fpr(rd);
798 case 0x43: /* faddq */
801 gen_op_load_fpr_FT0(rs1);
802 gen_op_load_fpr_FT1(rs2);
804 gen_op_store_FT0_fpr(rd);
807 gen_op_load_fpr_DT0(rs1);
808 gen_op_load_fpr_DT1(rs2);
810 gen_op_store_DT0_fpr(rd);
812 case 0x47: /* fsubq */
815 gen_op_load_fpr_FT0(rs1);
816 gen_op_load_fpr_FT1(rs2);
818 gen_op_store_FT0_fpr(rd);
821 gen_op_load_fpr_DT0(rs1);
822 gen_op_load_fpr_DT1(rs2);
824 gen_op_store_DT0_fpr(rd);
826 case 0x4b: /* fmulq */
829 gen_op_load_fpr_FT0(rs1);
830 gen_op_load_fpr_FT1(rs2);
832 gen_op_store_FT0_fpr(rd);
835 gen_op_load_fpr_DT0(rs1);
836 gen_op_load_fpr_DT1(rs2);
838 gen_op_store_DT0_fpr(rd);
840 case 0x4f: /* fdivq */
843 gen_op_load_fpr_FT0(rs1);
844 gen_op_load_fpr_FT1(rs2);
846 gen_op_store_DT0_fpr(rd);
848 case 0x6e: /* fdmulq */
/* Conversions: single-source ops loading FT1/DT1 then storing rd. */
851 gen_op_load_fpr_FT1(rs2);
853 gen_op_store_FT0_fpr(rd);
856 gen_op_load_fpr_DT1(rs2);
858 gen_op_store_FT0_fpr(rd);
860 case 0xc7: /* fqtos */
863 gen_op_load_fpr_FT1(rs2);
865 gen_op_store_DT0_fpr(rd);
868 gen_op_load_fpr_FT1(rs2);
870 gen_op_store_DT0_fpr(rd);
872 case 0xcb: /* fqtod */
874 case 0xcc: /* fitoq */
876 case 0xcd: /* fstoq */
878 case 0xce: /* fdtoq */
881 gen_op_load_fpr_FT1(rs2);
883 gen_op_store_FT0_fpr(rd);
886 gen_op_load_fpr_DT1(rs2);
888 gen_op_store_FT0_fpr(rd);
890 case 0xd3: /* fqtoi */
/* Unimplemented V9-only FPops fall through to the nfpu/illegal path. */
893 case 0x2: /* V9 fmovd */
894 case 0x6: /* V9 fnegd */
895 case 0xa: /* V9 fabsd */
896 case 0x81: /* V9 fstox */
897 case 0x82: /* V9 fdtox */
898 case 0x84: /* V9 fxtos */
899 case 0x88: /* V9 fxtod */
901 case 0x3: /* V9 fmovq */
902 case 0x7: /* V9 fnegq */
903 case 0xb: /* V9 fabsq */
904 case 0x83: /* V9 fqtox */
905 case 0x8c: /* V9 fxtoq */
/* xop 0x35: FPop2 — FP compares (and V9 conditional moves). */
908 } else if (xop == 0x35) { /* FPU Operations */
909 #if !defined(CONFIG_USER_ONLY)
910 gen_op_trap_ifnofpu();
912 rs1 = GET_FIELD(insn, 13, 17);
913 rs2 = GET_FIELD(insn, 27, 31);
914 xop = GET_FIELD(insn, 18, 26);
915 /* V9 fmovscc: x5, cond = x >> 1 */
916 /* V9 fmovdcc: x6, cond = x >> 1 */
918 /* V9 fmovqcc: x7, cond = x >> 1 */
921 gen_op_load_fpr_FT0(rs1);
922 gen_op_load_fpr_FT1(rs2);
926 gen_op_load_fpr_DT0(rs1);
927 gen_op_load_fpr_DT1(rs2);
930 case 0x53: /* fcmpq */
932 case 0x55: /* fcmpes */
933 gen_op_load_fpr_FT0(rs1);
934 gen_op_load_fpr_FT1(rs2);
935 gen_op_fcmps(); /* XXX should trap if qNaN or sNaN */
937 case 0x56: /* fcmped */
938 gen_op_load_fpr_DT0(rs1);
939 gen_op_load_fpr_DT1(rs2);
940 gen_op_fcmpd(); /* XXX should trap if qNaN or sNaN */
942 case 0x57: /* fcmpeq */
/* xop 0x2: OR — with %g0 patterns peepholed into plain moves. */
948 } else if (xop == 0x2) {
951 rs1 = GET_FIELD(insn, 13, 17);
953 // or %g0, x, y -> mov T1, x; mov y, T1
954 if (IS_IMM) { /* immediate */
955 rs2 = GET_FIELDs(insn, 19, 31);
956 gen_movl_imm_T1(rs2);
957 } else { /* register */
958 rs2 = GET_FIELD(insn, 27, 31);
959 gen_movl_reg_T1(rs2);
963 gen_movl_reg_T0(rs1);
964 if (IS_IMM) { /* immediate */
965 // or x, #0, y -> mov T1, x; mov y, T1
966 rs2 = GET_FIELDs(insn, 19, 31);
968 gen_movl_imm_T1(rs2);
971 } else { /* register */
972 // or x, %g0, y -> mov T1, x; mov y, T1
973 rs2 = GET_FIELD(insn, 27, 31);
975 gen_movl_reg_T1(rs2);
/* xop < 0x38: generic two-operand ALU ops; bit 4 selects the
 * condition-code-setting variant (hence the switch on xop & ~0x10). */
982 } else if (xop < 0x38) {
983 rs1 = GET_FIELD(insn, 13, 17);
984 gen_movl_reg_T0(rs1);
985 if (IS_IMM) { /* immediate */
986 rs2 = GET_FIELDs(insn, 19, 31);
987 gen_movl_imm_T1(rs2);
988 } else { /* register */
989 rs2 = GET_FIELD(insn, 27, 31);
990 gen_movl_reg_T1(rs2);
993 switch (xop & ~0x10) {
996 gen_op_add_T1_T0_cc();
1003 gen_op_logic_T0_cc();
1008 gen_op_logic_T0_cc();
1013 gen_op_logic_T0_cc();
1017 gen_op_sub_T1_T0_cc();
1022 gen_op_andn_T1_T0();
1024 gen_op_logic_T0_cc();
1029 gen_op_logic_T0_cc();
1032 gen_op_xnor_T1_T0();
1034 gen_op_logic_T0_cc();
1038 gen_op_addx_T1_T0_cc();
1040 gen_op_addx_T1_T0();
1043 gen_op_umul_T1_T0();
1045 gen_op_logic_T0_cc();
1048 gen_op_smul_T1_T0();
1050 gen_op_logic_T0_cc();
1054 gen_op_subx_T1_T0_cc();
1056 gen_op_subx_T1_T0();
1059 gen_op_udiv_T1_T0();
1064 gen_op_sdiv_T1_T0();
1069 case 0x9: /* V9 mulx */
1070 case 0xd: /* V9 udivx */
1073 gen_movl_T0_reg(rd);
1076 case 0x20: /* taddcc */
1077 case 0x21: /* tsubcc */
1078 case 0x22: /* taddcctv */
1079 case 0x23: /* tsubcctv */
1081 case 0x24: /* mulscc */
1082 gen_op_mulscc_T1_T0();
1083 gen_movl_T0_reg(rd);
1085 case 0x25: /* sll, V9 sllx */
1087 gen_movl_T0_reg(rd);
1089 case 0x26: /* srl, V9 srlx */
1091 gen_movl_T0_reg(rd);
1093 case 0x27: /* sra, V9 srax */
1095 gen_movl_T0_reg(rd);
/* State-register writes (wry etc.); V9-only targets unimplemented. */
1105 case 0x2: /* V9 wrccr */
1106 case 0x3: /* V9 wrasi */
1107 case 0x6: /* V9 wrfprs */
1108 case 0xf: /* V9 sir */
1113 #if !defined(CONFIG_USER_ONLY)
/* Privileged state writes: trap unless in supervisor mode. */
1114 case 0x31: /* wrpsr, V9 saved, restored */
1116 if (!supervisor(dc))
1122 case 0x32: /* wrwim, V9 wrpr */
1124 if (!supervisor(dc))
1132 if (!supervisor(dc))
1140 case 0x2a: /* V9 rdpr */
1141 case 0x2b: /* V9 flushw */
1142 case 0x2c: /* V9 movcc */
1143 case 0x2d: /* V9 sdivx */
1144 case 0x2e: /* V9 popc */
1145 case 0x2f: /* V9 movr */
/* xop >= 0x38: control transfer and window ops; compute rs1 + operand. */
1150 rs1 = GET_FIELD(insn, 13, 17);
1151 gen_movl_reg_T0(rs1);
1152 if (IS_IMM) { /* immediate */
1153 rs2 = GET_FIELDs(insn, 19, 31);
1157 gen_movl_imm_T1(rs2);
1162 } else { /* register */
1163 rs2 = GET_FIELD(insn, 27, 31);
1167 gen_movl_reg_T1(rs2);
1174 case 0x38: /* jmpl */
/* jmpl: npc <- computed target; old pc saved in rd; npc now dynamic. */
1176 gen_op_movl_npc_T0();
1178 gen_op_movl_T0_im(dc->pc);
1179 gen_movl_T0_reg(rd);
1182 dc->npc = DYNAMIC_PC;
1185 #if !defined(CONFIG_USER_ONLY)
1186 case 0x39: /* rett, V9 return */
1188 if (!supervisor(dc))
1190 gen_op_movl_npc_T0();
1195 case 0x3b: /* flush */
1198 case 0x3c: /* save */
1201 gen_movl_T0_reg(rd);
1203 case 0x3d: /* restore */
1206 gen_movl_T0_reg(rd);
1209 case 0x3e: /* V9 done/retry */
/* Format 3, op=3: loads and stores; effective address in T0. */
1216 case 3: /* load/store instructions */
1218 unsigned int xop = GET_FIELD(insn, 7, 12);
1219 rs1 = GET_FIELD(insn, 13, 17);
1220 gen_movl_reg_T0(rs1);
1221 if (IS_IMM) { /* immediate */
1222 rs2 = GET_FIELDs(insn, 19, 31);
1226 gen_movl_imm_T1(rs2);
1231 } else { /* register */
1232 rs2 = GET_FIELD(insn, 27, 31);
1236 gen_movl_reg_T1(rs2);
/* Integer loads (normal and alternate-space variants). */
1242 if (xop < 4 || (xop > 7 && xop < 0x14) || \
1243 (xop > 0x17 && xop < 0x20)) {
1245 case 0x0: /* load word */
1248 case 0x1: /* load unsigned byte */
1251 case 0x2: /* load unsigned halfword */
1254 case 0x3: /* load double word */
1256 gen_movl_T0_reg(rd + 1);
1258 case 0x9: /* load signed byte */
1261 case 0xa: /* load signed halfword */
1264 case 0xd: /* ldstub -- XXX: should be atomically */
1265 gen_op_ldst(ldstub);
1267 case 0x0f: /* swap register with memory. Also atomically */
1268 gen_movl_reg_T1(rd);
1271 #if !defined(CONFIG_USER_ONLY)
/* Alternate-space loads are privileged: trap if not supervisor. */
1272 case 0x10: /* load word alternate */
1273 if (!supervisor(dc))
1275 gen_op_lda(insn, 1, 4, 0);
1277 case 0x11: /* load unsigned byte alternate */
1278 if (!supervisor(dc))
1280 gen_op_lduba(insn, 1, 1, 0);
1282 case 0x12: /* load unsigned halfword alternate */
1283 if (!supervisor(dc))
1285 gen_op_lduha(insn, 1, 2, 0);
1287 case 0x13: /* load double word alternate */
1288 if (!supervisor(dc))
1290 gen_op_ldda(insn, 1, 8, 0);
1291 gen_movl_T0_reg(rd + 1);
1293 case 0x19: /* load signed byte alternate */
1294 if (!supervisor(dc))
1296 gen_op_ldsba(insn, 1, 1, 1);
1298 case 0x1a: /* load signed halfword alternate */
1299 if (!supervisor(dc))
1301 gen_op_ldsha(insn, 1, 2 ,1);
1303 case 0x1d: /* ldstuba -- XXX: should be atomically */
1304 if (!supervisor(dc))
1306 gen_op_ldstuba(insn, 1, 1, 0);
1308 case 0x1f: /* swap reg with alt. memory. Also atomically */
1309 if (!supervisor(dc))
1311 gen_movl_reg_T1(rd);
1312 gen_op_swapa(insn, 1, 4, 0);
1315 /* avoid warnings */
1316 (void) &gen_op_stfa;
1317 (void) &gen_op_stdfa;
1318 (void) &gen_op_ldfa;
1319 (void) &gen_op_lddfa;
1322 case 0x08: /* V9 ldsw */
1323 case 0x0b: /* V9 ldx */
1324 case 0x18: /* V9 ldswa */
1325 case 0x1b: /* V9 ldxa */
1326 case 0x2d: /* V9 prefetch */
1327 case 0x30: /* V9 ldfa */
1328 case 0x33: /* V9 lddfa */
1329 case 0x3d: /* V9 prefetcha */
1331 case 0x32: /* V9 ldqfa */
1334 gen_movl_T1_reg(rd);
/* FP loads. */
1335 } else if (xop >= 0x20 && xop < 0x24) {
1336 #if !defined(CONFIG_USER_ONLY)
1337 gen_op_trap_ifnofpu();
1340 case 0x20: /* load fpreg */
1342 gen_op_store_FT0_fpr(rd);
1344 case 0x21: /* load fsr */
1346 gen_op_store_FT0_fpr(rd);
1348 case 0x22: /* load quad fpreg */
1350 case 0x23: /* load double fpreg */
1352 gen_op_store_DT0_fpr(rd);
/* Integer stores (normal and alternate-space variants). */
1357 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18)) {
1358 gen_movl_reg_T1(rd);
1371 gen_movl_reg_T2(rd + 1);
1374 #if !defined(CONFIG_USER_ONLY)
1376 if (!supervisor(dc))
1378 gen_op_sta(insn, 0, 4, 0);
1381 if (!supervisor(dc))
1383 gen_op_stba(insn, 0, 1, 0);
1386 if (!supervisor(dc))
1388 gen_op_stha(insn, 0, 2, 0);
1391 if (!supervisor(dc))
1394 gen_movl_reg_T2(rd + 1);
1395 gen_op_stda(insn, 0, 8, 0);
1399 case 0x0e: /* V9 stx */
1400 case 0x1e: /* V9 stxa */
/* FP stores. */
1403 } else if (xop > 0x23 && xop < 0x28) {
1404 #if !defined(CONFIG_USER_ONLY)
1405 gen_op_trap_ifnofpu();
1409 gen_op_load_fpr_FT0(rd);
1412 case 0x25: /* stfsr, V9 stxfsr */
1413 gen_op_load_fpr_FT0(rd);
1416 case 0x26: /* stdfq */
1419 gen_op_load_fpr_DT0(rd);
1423 case 0x34: /* V9 stfa */
1424 case 0x37: /* V9 stdfa */
1425 case 0x3c: /* V9 casa */
1426 case 0x3e: /* V9 casxa */
1428 case 0x36: /* V9 stqfa */
1431 } else if (xop > 0x33 && xop < 0x38) {
/* Epilogue: advance the tracked pc/npc for straight-line code, or
 * resolve the pending JUMP_PC branch. */
1440 /* default case for non jump instructions */
1441 if (dc->npc == DYNAMIC_PC) {
1442 dc->pc = DYNAMIC_PC;
1444 } else if (dc->npc == JUMP_PC) {
1445 /* we can do a static jump */
1446 gen_op_branch2((long)dc->tb, dc->jump_pc[0], dc->jump_pc[1]);
1450 dc->npc = dc->npc + 4;
/* Exception labels (reached by goto from the decode paths above). */
1456 gen_op_exception(TT_ILL_INSN);
1459 #if !defined(CONFIG_USER_ONLY)
1462 gen_op_exception(TT_PRIV_INSN);
1468 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
/* Translate a basic block starting at tb->pc into the micro-op buffer.
 * spc != 0 means "search PC" mode: record per-op pc/npc so a host PC
 * can be mapped back to a guest PC after a fault.  Translation stops
 * at a control-flow change, a breakpoint, single-step, buffer
 * exhaustion, or the end of the guest page.
 * NOTE(review): many interior lines (loop header, breakpoint body,
 * epilogue braces) are missing from this extract. */
1472 static inline int gen_intermediate_code_internal(TranslationBlock * tb,
1473 int spc, CPUSPARCState *env)
1475 target_ulong pc_start, last_pc;
1476 uint16_t *gen_opc_end;
1477 DisasContext dc1, *dc = &dc1;
1480 memset(dc, 0, sizeof(DisasContext));
/* tb->cs_base carries the guest npc alongside tb->pc. */
1485 dc->npc = (target_ulong) tb->cs_base;
1486 #if defined(CONFIG_USER_ONLY)
1489 dc->mem_idx = ((env->psrs) != 0);
1491 gen_opc_ptr = gen_opc_buf;
1492 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
1493 gen_opparam_ptr = gen_opparam_buf;
/* Stop translation when a debugger breakpoint falls inside the TB. */
1496 if (env->nb_breakpoints > 0) {
1497 for(j = 0; j < env->nb_breakpoints; j++) {
1498 if (env->breakpoints[j] == dc->pc) {
1499 if (dc->pc != pc_start)
1511 fprintf(logfile, "Search PC...\n");
1512 j = gen_opc_ptr - gen_opc_buf;
1516 gen_opc_instr_start[lj++] = 0;
1517 gen_opc_pc[lj] = dc->pc;
1518 gen_opc_npc[lj] = dc->npc;
1519 gen_opc_instr_start[lj] = 1;
1523 disas_sparc_insn(dc);
1526 /* if the next PC is different, we abort now */
1527 if (dc->pc != (last_pc + 4))
1529 /* if single step mode, we generate only one instruction and
1530 generate an exception */
1531 if (env->singlestep_enabled) {
1532 gen_op_jmp_im(dc->pc);
1537 } while ((gen_opc_ptr < gen_opc_end) &&
1538 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32));
/* Epilogue: chain directly when both pc and npc are static, else
 * flush whatever is known into the CPU state. */
1542 if (dc->pc != DYNAMIC_PC &&
1543 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
1544 /* static PC and NPC: we can use direct chaining */
1545 gen_op_branch((long)tb, dc->pc, dc->npc);
1547 if (dc->pc != DYNAMIC_PC)
1548 gen_op_jmp_im(dc->pc);
1554 *gen_opc_ptr = INDEX_op_end;
1556 j = gen_opc_ptr - gen_opc_buf;
1559 gen_opc_instr_start[lj++] = 0;
1567 tb->size = last_pc + 4 - pc_start;
/* Optional disassembly / op-dump logging. */
1570 if (loglevel & CPU_LOG_TB_IN_ASM) {
1571 fprintf(logfile, "--------------\n");
1572 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
1573 target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
1574 fprintf(logfile, "\n");
1575 if (loglevel & CPU_LOG_TB_OP) {
1576 fprintf(logfile, "OP:\n");
1577 dump_ops(gen_opc_buf, gen_opparam_buf);
1578 fprintf(logfile, "\n");
/* Public entry point: translate a TB for execution (spc == 0). */
1585 int gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
1587 return gen_intermediate_code_internal(tb, 0, env);
/* Public entry point: re-translate a TB in search-PC mode (spc == 1)
 * to map a faulting host PC back to a guest PC. */
1590 int gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
1592 return gen_intermediate_code_internal(tb, 1, env);
1595 extern int ram_size;
/* Power-on reset: zero the whole CPU state, point the register-window
 * pointer at the current window, and set the boot PC/npc.  The guest
 * boot PROM convention appears to pass the RAM size in %g1 — TODO
 * confirm against the firmware this targets.
 * NOTE(review): braces and some initialization lines are missing from
 * this extract. */
1597 void cpu_reset(CPUSPARCState *env)
1599 memset(env, 0, sizeof(*env));
1603 env->regwptr = env->regbase + (env->cwp * 16);
1604 #if defined(CONFIG_USER_ONLY)
1605 env->user_mode_only = 1;
1608 env->pc = 0xffd00000;
1609 env->gregs[1] = ram_size;
1610 env->mmuregs[0] = (0x04 << 24); /* Impl 0, ver 4, MMU disabled */
1611 env->npc = env->pc + 4;
/* Allocate and initialise a fresh CPU state object; ownership passes
 * to the caller.  NOTE(review): the failure-return and reset/init
 * lines are missing from this extract; only the allocation check and
 * the cpu_single_env assignment are visible. */
1615 CPUSPARCState *cpu_sparc_init(void)
1621 if (!(env = malloc(sizeof(CPUSPARCState))))
1623 cpu_single_env = env;
/* Pick character b when PSR flag a is set, '-' otherwise. */
1628 #define GET_FLAG(a,b) ((env->psr & a)?b:'-')
/* Dump the CPU state (pc/npc, globals, current register window, FPU
 * registers, PSR flags, WIM, FSR) to stream f via cpu_fprintf.
 * NOTE(review): loop braces and some lines are missing from this
 * extract; the visible pairs of cpu_fprintf calls are the two halves
 * of 8-register rows. */
1630 void cpu_dump_state(CPUState *env, FILE *f,
1631 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
1636 cpu_fprintf(f, "pc: " TARGET_FMT_lx " npc: " TARGET_FMT_lx "\n", env->pc, env->npc);
1637 cpu_fprintf(f, "General Registers:\n");
1638 for (i = 0; i < 4; i++)
1639 cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
1640 cpu_fprintf(f, "\n");
1642 cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
1643 cpu_fprintf(f, "\nCurrent Register Window:\n");
1644 for (x = 0; x < 3; x++) {
1645 for (i = 0; i < 4; i++)
1646 cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
1647 (x == 0 ? 'o' : (x == 1 ? 'l' : 'i')), i,
1648 env->regwptr[i + x * 8]);
1649 cpu_fprintf(f, "\n");
1651 cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
1652 (x == 0 ? 'o' : x == 1 ? 'l' : 'i'), i,
1653 env->regwptr[i + x * 8]);
1654 cpu_fprintf(f, "\n");
1656 cpu_fprintf(f, "\nFloating Point Registers:\n");
1657 for (i = 0; i < 32; i++) {
1659 cpu_fprintf(f, "%%f%02d:", i);
/* NOTE(review): "%016lf" is a dubious printf conversion for what is
 * presumably a float/double fpr element — verify env->fpr's type and
 * the intended field width. */
1660 cpu_fprintf(f, " %016lf", env->fpr[i]);
1662 cpu_fprintf(f, "\n");
1664 cpu_fprintf(f, "psr: 0x%08x -> %c%c%c%c %c%c%c wim: 0x%08x\n", GET_PSR(env),
1665 GET_FLAG(PSR_ZERO, 'Z'), GET_FLAG(PSR_OVF, 'V'),
1666 GET_FLAG(PSR_NEG, 'N'), GET_FLAG(PSR_CARRY, 'C'),
1667 env->psrs?'S':'-', env->psrps?'P':'-',
1668 env->psret?'E':'-', env->wim);
1669 cpu_fprintf(f, "fsr: 0x%08x\n", env->fsr);
/* Debug translation of a guest virtual address to a physical address.
 * User-only builds have a flat address space (body elided — presumably
 * returns addr unchanged; TODO confirm); system builds consult the MMU
 * via get_physical_address() and the elided lines presumably return -1
 * on failure and phys_addr on success. */
1672 #if defined(CONFIG_USER_ONLY)
1673 target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
1679 extern int get_physical_address (CPUState *env, target_phys_addr_t *physical, int *prot,
1680 int *access_index, target_ulong address, int rw,
1683 target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
1685 target_phys_addr_t phys_addr;
1686 int prot, access_index;
1688 if (get_physical_address(env, &phys_addr, &prot, &access_index, addr, 2, 0) != 0)
/* Guest FLUSH instruction helper: invalidate any translated code
 * covering the 8-byte doubleword at addr, forcing retranslation.
 * NOTE(review): function braces are missing from this extract. */
1694 void helper_flush(target_ulong addr)
1697 tb_invalidate_page_range(addr, addr + 8);