4 * Copyright (c) 2005-2006 CodeSourcery
5 * Written by Paul Brook
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, write to the Free Software
19 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
31 #include "m68k-qreg.h"
33 static inline void qemu_assert(int cond, const char *msg)
36 fprintf (stderr, "badness: %s\n", msg);
41 /* internal defines */
42 typedef struct DisasContext {
47 struct TranslationBlock *tb;
48 int singlestep_enabled;
51 #define DISAS_JUMP_NEXT 4
53 /* XXX: move that elsewhere */
54 /* ??? Fix exceptions. */
55 static void *gen_throws_exception;
56 #define gen_last_qop NULL
58 static uint16_t *gen_opc_ptr;
59 static uint32_t *gen_opparam_ptr;
64 #define DEF(s, n, copy_size) INDEX_op_ ## s,
79 #define DREG(insn, pos) (((insn >> pos) & 7) + QREG_D0)
80 #define AREG(insn, pos) (((insn >> pos) & 7) + QREG_A0)
81 #define FREG(insn, pos) (((insn >> pos) & 7) + QREG_F0)
83 #define M68K_INSN_CF_A (1 << 0)
84 #define M68K_INSN_CF_B (1 << 1)
85 #define M68K_INSN_CF_C (1 << 2)
86 #define M68K_INSN_CF_MAC (1 << 3)
87 #define M68K_INSN_CF_EMAC (1 << 4)
88 #define M68K_INSN_CF_FPU (1 << 5)
95 static m68k_def_t m68k_cpu_defs[] = {
96 {"m5206", M68K_INSN_CF_A},
97 {"cfv4e", M68K_INSN_CF_A | M68K_INSN_CF_B | M68K_INSN_CF_C
98 | M68K_INSN_CF_MAC | M68K_INSN_CF_EMAC | M68K_INSN_CF_FPU},
102 typedef void (*disas_proc)(DisasContext *, uint16_t);
104 #define DISAS_INSN(name) \
105 static void disas_##name (DisasContext *s, uint16_t insn)
107 /* Generate a load from the specified address. Narrow values are
108 sign extended to full register width. */
109 static inline int gen_load(int opsize, int addr, int sign)
114 tmp = gen_new_qreg(QMODE_I32);
116 gen_op_ld8s32(tmp, addr);
118 gen_op_ld8u32(tmp, addr);
121 tmp = gen_new_qreg(QMODE_I32);
123 gen_op_ld16s32(tmp, addr);
125 gen_op_ld16u32(tmp, addr);
128 tmp = gen_new_qreg(QMODE_I32);
129 gen_op_ld32(tmp, addr);
132 tmp = gen_new_qreg(QMODE_F32);
133 gen_op_ldf32(tmp, addr);
136 tmp = gen_new_qreg(QMODE_F64);
137 gen_op_ldf64(tmp, addr);
140 qemu_assert(0, "bad load size");
142 gen_throws_exception = gen_last_qop;
146 /* Generate a store. */
147 static inline void gen_store(int opsize, int addr, int val)
151 gen_op_st8(addr, val);
154 gen_op_st16(addr, val);
157 gen_op_st32(addr, val);
160 gen_op_stf32(addr, val);
163 gen_op_stf64(addr, val);
166 qemu_assert(0, "bad store size");
168 gen_throws_exception = gen_last_qop;
171 /* Generate an unsigned load if VAL is 0 a signed load if val is -1,
172 otherwise generate a store. */
173 static int gen_ldst(int opsize, int addr, int val)
176 gen_store(opsize, addr, val);
179 return gen_load(opsize, addr, val != 0);
183 /* Handle a base + index + displacement effective addresss. A base of
184 -1 means pc-relative. */
185 static int gen_lea_indexed(DisasContext *s, int opsize, int base)
196 tmp = ((ext >> 12) & 7) + ((ext & 0x8000) ? QREG_A0 : QREG_D0);
197 /* ??? Check W/L bit. */
198 scale = (ext >> 9) & 3;
202 add = gen_new_qreg(QMODE_I32);
203 gen_op_shl32(add, tmp, gen_im32(scale));
205 tmp = gen_new_qreg(QMODE_I32);
207 gen_op_add32(tmp, base, gen_im32((int8_t)ext));
208 gen_op_add32(tmp, tmp, add);
210 gen_op_add32(tmp, add, gen_im32(offset + (int8_t)ext));
215 /* Read a 32-bit immediate constant. */
216 static inline uint32_t read_im32(DisasContext *s)
219 im = ((uint32_t)lduw(s->pc)) << 16;
227 /* Update the CPU env CC_OP state. */
228 static inline void gen_flush_cc_op(DisasContext *s)
230 if (s->cc_op != CC_OP_DYNAMIC)
231 gen_op_mov32(QREG_CC_OP, gen_im32(s->cc_op));
234 /* Evaluate all the CC flags. */
235 static inline void gen_flush_flags(DisasContext *s)
237 if (s->cc_op == CC_OP_FLAGS)
239 gen_op_flush_flags(s->cc_op);
240 s->cc_op = CC_OP_FLAGS;
243 static inline int opsize_bytes(int opsize)
246 case OS_BYTE: return 1;
247 case OS_WORD: return 2;
248 case OS_LONG: return 4;
249 case OS_SINGLE: return 4;
250 case OS_DOUBLE: return 8;
252 qemu_assert(0, "bad operand size");
256 /* Assign value to a register. If the width is less than the register width
257 only the low part of the register is set. */
258 static void gen_partset_reg(int opsize, int reg, int val)
263 gen_op_and32(reg, reg, gen_im32(0xffffff00));
264 tmp = gen_new_qreg(QMODE_I32);
265 gen_op_and32(tmp, val, gen_im32(0xff));
266 gen_op_or32(reg, reg, tmp);
269 gen_op_and32(reg, reg, gen_im32(0xffff0000));
270 tmp = gen_new_qreg(QMODE_I32);
271 gen_op_and32(tmp, val, gen_im32(0xffff));
272 gen_op_or32(reg, reg, tmp);
275 gen_op_mov32(reg, val);
278 gen_op_pack_32_f32(reg, val);
281 qemu_assert(0, "Bad operand size");
286 /* Sign or zero extend a value. */
287 static inline int gen_extend(int val, int opsize, int sign)
293 tmp = gen_new_qreg(QMODE_I32);
295 gen_op_ext8s32(tmp, val);
297 gen_op_ext8u32(tmp, val);
300 tmp = gen_new_qreg(QMODE_I32);
302 gen_op_ext16s32(tmp, val);
304 gen_op_ext16u32(tmp, val);
310 tmp = gen_new_qreg(QMODE_F32);
311 gen_op_pack_f32_32(tmp, val);
314 qemu_assert(0, "Bad operand size");
319 /* Generate code for an "effective address". Does not adjust the base
320 register for autoincrememnt addressing modes. */
321 static int gen_lea(DisasContext *s, uint16_t insn, int opsize)
329 switch ((insn >> 3) & 7) {
330 case 0: /* Data register direct. */
331 case 1: /* Address register direct. */
332 /* ??? generate bad addressing mode fault. */
333 qemu_assert(0, "invalid addressing mode");
334 case 2: /* Indirect register */
335 case 3: /* Indirect postincrement. */
338 case 4: /* Indirect predecrememnt. */
340 tmp = gen_new_qreg(QMODE_I32);
341 gen_op_sub32(tmp, reg, gen_im32(opsize_bytes(opsize)));
343 case 5: /* Indirect displacement. */
345 tmp = gen_new_qreg(QMODE_I32);
348 gen_op_add32(tmp, reg, gen_im32((int16_t)ext));
350 case 6: /* Indirect index + displacement. */
352 return gen_lea_indexed(s, opsize, reg);
355 case 0: /* Absolute short. */
356 offset = ldsw(s->pc);
358 return gen_im32(offset);
359 case 1: /* Absolute long. */
360 offset = read_im32(s);
361 return gen_im32(offset);
362 case 2: /* pc displacement */
363 tmp = gen_new_qreg(QMODE_I32);
365 offset += ldsw(s->pc);
367 return gen_im32(offset);
368 case 3: /* pc index+displacement. */
369 return gen_lea_indexed(s, opsize, -1);
370 case 4: /* Immediate. */
372 /* ??? generate bad addressing mode fault. */
373 qemu_assert(0, "invalid addressing mode");
376 /* Should never happen. */
380 /* Helper function for gen_ea. Reuse the computed address between the
381 for read/write operands. */
382 static inline int gen_ea_once(DisasContext *s, uint16_t insn, int opsize,
387 if (addrp && val > 0) {
390 tmp = gen_lea(s, insn, opsize);
394 return gen_ldst(opsize, tmp, val);
397 /* Generate code to load/store a value ito/from an EA. If VAL > 0 this is
398 a write otherwise it is a read (0 == sign extend, -1 == zero extend).
399 ADDRP is non-null for readwrite operands. */
400 static int gen_ea(DisasContext *s, uint16_t insn, int opsize, int val,
408 switch ((insn >> 3) & 7) {
409 case 0: /* Data register direct. */
412 gen_partset_reg(opsize, reg, val);
415 return gen_extend(reg, opsize, val);
417 case 1: /* Address register direct. */
420 gen_op_mov32(reg, val);
423 return gen_extend(reg, opsize, val);
425 case 2: /* Indirect register */
427 return gen_ldst(opsize, reg, val);
428 case 3: /* Indirect postincrement. */
430 result = gen_ldst(opsize, reg, val);
431 /* ??? This is not exception safe. The instruction may still
432 fault after this point. */
433 if (val > 0 || !addrp)
434 gen_op_add32(reg, reg, gen_im32(opsize_bytes(opsize)));
436 case 4: /* Indirect predecrememnt. */
439 if (addrp && val > 0) {
442 tmp = gen_lea(s, insn, opsize);
446 result = gen_ldst(opsize, tmp, val);
447 /* ??? This is not exception safe. The instruction may still
448 fault after this point. */
449 if (val > 0 || !addrp) {
451 gen_op_mov32(reg, tmp);
455 case 5: /* Indirect displacement. */
456 case 6: /* Indirect index + displacement. */
457 return gen_ea_once(s, insn, opsize, val, addrp);
460 case 0: /* Absolute short. */
461 case 1: /* Absolute long. */
462 case 2: /* pc displacement */
463 case 3: /* pc index+displacement. */
464 return gen_ea_once(s, insn, opsize, val, addrp);
465 case 4: /* Immediate. */
466 /* Sign extend values for consistency. */
470 offset = ldsb(s->pc + 1);
472 offset = ldub(s->pc + 1);
477 offset = ldsw(s->pc);
479 offset = lduw(s->pc);
483 offset = read_im32(s);
486 qemu_assert(0, "Bad immediate operand");
488 return gen_im32(offset);
490 qemu_assert(0, "invalid addressing mode");
493 /* Should never happen. */
497 static void gen_logic_cc(DisasContext *s, int val)
499 gen_op_logic_cc(val);
500 s->cc_op = CC_OP_LOGIC;
503 static void gen_jmpcc(DisasContext *s, int cond, int l1)
514 case 2: /* HI (!C && !Z) */
515 tmp = gen_new_qreg(QMODE_I32);
516 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_C | CCF_Z));
517 gen_op_jmp_z32(tmp, l1);
519 case 3: /* LS (C || Z) */
520 tmp = gen_new_qreg(QMODE_I32);
521 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_C | CCF_Z));
522 gen_op_jmp_nz32(tmp, l1);
524 case 4: /* CC (!C) */
525 tmp = gen_new_qreg(QMODE_I32);
526 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_C));
527 gen_op_jmp_z32(tmp, l1);
530 tmp = gen_new_qreg(QMODE_I32);
531 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_C));
532 gen_op_jmp_nz32(tmp, l1);
534 case 6: /* NE (!Z) */
535 tmp = gen_new_qreg(QMODE_I32);
536 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_Z));
537 gen_op_jmp_z32(tmp, l1);
540 tmp = gen_new_qreg(QMODE_I32);
541 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_Z));
542 gen_op_jmp_nz32(tmp, l1);
544 case 8: /* VC (!V) */
545 tmp = gen_new_qreg(QMODE_I32);
546 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_V));
547 gen_op_jmp_z32(tmp, l1);
550 tmp = gen_new_qreg(QMODE_I32);
551 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_V));
552 gen_op_jmp_nz32(tmp, l1);
554 case 10: /* PL (!N) */
555 tmp = gen_new_qreg(QMODE_I32);
556 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_N));
557 gen_op_jmp_z32(tmp, l1);
559 case 11: /* MI (N) */
560 tmp = gen_new_qreg(QMODE_I32);
561 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_N));
562 gen_op_jmp_nz32(tmp, l1);
564 case 12: /* GE (!(N ^ V)) */
565 tmp = gen_new_qreg(QMODE_I32);
566 gen_op_shr32(tmp, QREG_CC_DEST, gen_im32(2));
567 gen_op_xor32(tmp, tmp, QREG_CC_DEST);
568 gen_op_and32(tmp, tmp, gen_im32(CCF_V));
569 gen_op_jmp_z32(tmp, l1);
571 case 13: /* LT (N ^ V) */
572 tmp = gen_new_qreg(QMODE_I32);
573 gen_op_shr32(tmp, QREG_CC_DEST, gen_im32(2));
574 gen_op_xor32(tmp, tmp, QREG_CC_DEST);
575 gen_op_and32(tmp, tmp, gen_im32(CCF_V));
576 gen_op_jmp_nz32(tmp, l1);
578 case 14: /* GT (!(Z || (N ^ V))) */
581 l2 = gen_new_label();
582 tmp = gen_new_qreg(QMODE_I32);
583 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_Z));
584 gen_op_jmp_nz32(tmp, l2);
585 tmp = gen_new_qreg(QMODE_I32);
586 gen_op_shr32(tmp, QREG_CC_DEST, gen_im32(2));
587 gen_op_xor32(tmp, tmp, QREG_CC_DEST);
588 gen_op_and32(tmp, tmp, gen_im32(CCF_V));
589 gen_op_jmp_nz32(tmp, l2);
594 case 15: /* LE (Z || (N ^ V)) */
595 tmp = gen_new_qreg(QMODE_I32);
596 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_Z));
597 gen_op_jmp_nz32(tmp, l1);
598 tmp = gen_new_qreg(QMODE_I32);
599 gen_op_shr32(tmp, QREG_CC_DEST, gen_im32(2));
600 gen_op_xor32(tmp, tmp, QREG_CC_DEST);
601 gen_op_and32(tmp, tmp, gen_im32(CCF_V));
602 gen_op_jmp_nz32(tmp, l1);
605 /* Should ever happen. */
616 l1 = gen_new_label();
617 cond = (insn >> 8) & 0xf;
619 gen_op_and32(reg, reg, gen_im32(0xffffff00));
620 gen_jmpcc(s, cond ^ 1, l1);
621 gen_op_or32(reg, reg, gen_im32(0xff));
625 /* Generate a jump to to the address in qreg DEST. */
626 static void gen_jmp(DisasContext *s, int dest)
629 gen_op_mov32(QREG_PC, dest);
630 s->is_jmp = DISAS_JUMP;
633 static void gen_exception(DisasContext *s, uint32_t where, int nr)
636 gen_jmp(s, gen_im32(where));
637 gen_op_raise_exception(nr);
640 /* Generate a jump to an immediate address. */
641 static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
643 TranslationBlock *tb;
646 if (__builtin_expect (s->singlestep_enabled, 0)) {
647 gen_exception(s, dest, EXCP_DEBUG);
648 } else if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
649 (s->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
650 gen_op_goto_tb(0, n, (long)tb);
651 gen_op_mov32(QREG_PC, gen_im32(dest));
652 gen_op_mov32(QREG_T0, gen_im32((long)tb + n));
655 gen_jmp(s, gen_im32(dest));
656 gen_op_mov32(QREG_T0, gen_im32(0));
659 s->is_jmp = DISAS_TB_JUMP;
662 DISAS_INSN(undef_mac)
664 gen_exception(s, s->pc - 2, EXCP_LINEA);
667 DISAS_INSN(undef_fpu)
669 gen_exception(s, s->pc - 2, EXCP_LINEF);
674 gen_exception(s, s->pc - 2, EXCP_UNSUPPORTED);
675 cpu_abort(cpu_single_env, "Illegal instruction: %04x @ %08x",
686 sign = (insn & 0x100) != 0;
688 tmp = gen_new_qreg(QMODE_I32);
690 gen_op_ext16s32(tmp, reg);
692 gen_op_ext16u32(tmp, reg);
693 src = gen_ea(s, insn, OS_WORD, sign ? -1 : 0, NULL);
694 gen_op_mul32(tmp, tmp, src);
695 gen_op_mov32(reg, tmp);
696 /* Unlike m68k, coldfire always clears the overflow bit. */
697 gen_logic_cc(s, tmp);
707 sign = (insn & 0x100) != 0;
710 gen_op_ext16s32(QREG_DIV1, reg);
712 gen_op_ext16u32(QREG_DIV1, reg);
714 src = gen_ea(s, insn, OS_WORD, sign ? -1 : 0, NULL);
715 gen_op_mov32(QREG_DIV2, src);
722 tmp = gen_new_qreg(QMODE_I32);
723 src = gen_new_qreg(QMODE_I32);
724 gen_op_ext16u32(tmp, QREG_DIV1);
725 gen_op_shl32(src, QREG_DIV2, gen_im32(16));
726 gen_op_or32(reg, tmp, src);
728 s->cc_op = CC_OP_FLAGS;
741 gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
746 gen_op_mov32(QREG_DIV1, num);
747 den = gen_ea(s, insn, OS_LONG, 0, NULL);
748 gen_op_mov32(QREG_DIV2, den);
756 gen_op_mov32 (reg, QREG_DIV1);
759 gen_op_mov32 (reg, QREG_DIV2);
762 s->cc_op = CC_OP_FLAGS;
774 add = (insn & 0x4000) != 0;
776 dest = gen_new_qreg(QMODE_I32);
778 tmp = gen_ea(s, insn, OS_LONG, 0, &addr);
782 src = gen_ea(s, insn, OS_LONG, 0, NULL);
785 gen_op_add32(dest, tmp, src);
786 gen_op_update_xflag_lt(dest, src);
787 s->cc_op = CC_OP_ADD;
789 gen_op_update_xflag_lt(tmp, src);
790 gen_op_sub32(dest, tmp, src);
791 s->cc_op = CC_OP_SUB;
793 gen_op_update_cc_add(dest, src);
795 gen_ea(s, insn, OS_LONG, dest, &addr);
797 gen_op_mov32(reg, dest);
802 /* Reverse the order of the bits in REG. */
810 val = gen_new_qreg(QMODE_I32);
811 tmp1 = gen_new_qreg(QMODE_I32);
812 tmp2 = gen_new_qreg(QMODE_I32);
814 gen_op_mov32(val, reg);
815 /* Reverse bits within each nibble. */
816 gen_op_shl32(tmp1, val, gen_im32(3));
817 gen_op_and32(tmp1, tmp1, gen_im32(0x88888888));
818 gen_op_shl32(tmp2, val, gen_im32(1));
819 gen_op_and32(tmp2, tmp2, gen_im32(0x44444444));
820 gen_op_or32(tmp1, tmp1, tmp2);
821 gen_op_shr32(tmp2, val, gen_im32(1));
822 gen_op_and32(tmp2, tmp2, gen_im32(0x22222222));
823 gen_op_or32(tmp1, tmp1, tmp2);
824 gen_op_shr32(tmp2, val, gen_im32(3));
825 gen_op_and32(tmp2, tmp2, gen_im32(0x11111111));
826 gen_op_or32(tmp1, tmp1, tmp2);
827 /* Reverse nibbles withing bytes. */
828 gen_op_shl32(val, tmp1, gen_im32(4));
829 gen_op_and32(val, val, gen_im32(0xf0f0f0f0));
830 gen_op_shr32(tmp2, tmp1, gen_im32(4));
831 gen_op_and32(tmp2, tmp2, gen_im32(0x0f0f0f0f));
832 gen_op_or32(val, val, tmp2);
834 gen_op_bswap32(reg, val);
835 gen_op_mov32(reg, val);
838 DISAS_INSN(bitop_reg)
848 if ((insn & 0x38) != 0)
852 op = (insn >> 6) & 3;
853 src1 = gen_ea(s, insn, opsize, 0, op ? &addr: NULL);
854 src2 = DREG(insn, 9);
855 dest = gen_new_qreg(QMODE_I32);
858 tmp = gen_new_qreg(QMODE_I32);
859 if (opsize == OS_BYTE)
860 gen_op_and32(tmp, src2, gen_im32(7));
862 gen_op_and32(tmp, src2, gen_im32(31));
864 tmp = gen_new_qreg(QMODE_I32);
865 gen_op_shl32(tmp, gen_im32(1), src2);
867 gen_op_btest(src1, tmp);
870 gen_op_xor32(dest, src1, tmp);
873 gen_op_not32(tmp, tmp);
874 gen_op_and32(dest, src1, tmp);
877 gen_op_or32(dest, src1, tmp);
883 gen_ea(s, insn, opsize, dest, &addr);
893 tmp = gen_new_qreg(QMODE_I32);
895 gen_op_and32(tmp, QREG_CC_DEST, gen_im32(CCF_V));
896 l1 = gen_new_label();
897 gen_op_jmp_z32(tmp, l1);
898 tmp = gen_new_qreg(QMODE_I32);
899 gen_op_shr32(tmp, reg, gen_im32(31));
900 gen_op_xor32(tmp, tmp, gen_im32(0x80000000));
901 gen_op_mov32(reg, tmp);
903 gen_logic_cc(s, tmp);
906 static void gen_push(int val)
910 tmp = gen_new_qreg(QMODE_I32);
911 gen_op_sub32(tmp, QREG_SP, gen_im32(4));
912 gen_store(OS_LONG, tmp, val);
913 gen_op_mov32(QREG_SP, tmp);
927 tmp = gen_lea(s, insn, OS_LONG);
928 addr = gen_new_qreg(QMODE_I32);
929 gen_op_mov32(addr, tmp);
930 is_load = ((insn & 0x0400) != 0);
931 for (i = 0; i < 16; i++, mask >>= 1) {
938 tmp = gen_load(OS_LONG, addr, 0);
939 gen_op_mov32(reg, tmp);
941 gen_store(OS_LONG, addr, reg);
944 gen_op_add32(addr, addr, gen_im32(4));
960 if ((insn & 0x38) != 0)
964 op = (insn >> 6) & 3;
966 bitnum = lduw(s->pc);
968 if (bitnum & 0xff00) {
969 disas_undef(s, insn);
973 src1 = gen_ea(s, insn, opsize, 0, op ? &addr: NULL);
976 tmp = gen_new_qreg(QMODE_I32);
977 if (opsize == OS_BYTE)
983 gen_op_btest(src1, gen_im32(mask));
985 dest = gen_new_qreg(QMODE_I32);
991 gen_op_xor32(dest, src1, gen_im32(mask));
994 gen_op_and32(dest, src1, gen_im32(~mask));
997 gen_op_or32(dest, src1, gen_im32(mask));
1003 gen_ea(s, insn, opsize, dest, &addr);
1006 DISAS_INSN(arith_im)
1014 op = (insn >> 9) & 7;
1015 src1 = gen_ea(s, insn, OS_LONG, 0, (op == 6) ? NULL : &addr);
1016 src2 = gen_im32(read_im32(s));
1017 dest = gen_new_qreg(QMODE_I32);
1020 gen_op_or32(dest, src1, src2);
1021 gen_logic_cc(s, dest);
1024 gen_op_and32(dest, src1, src2);
1025 gen_logic_cc(s, dest);
1028 gen_op_mov32(dest, src1);
1029 gen_op_update_xflag_lt(dest, src2);
1030 gen_op_sub32(dest, dest, src2);
1031 gen_op_update_cc_add(dest, src2);
1032 s->cc_op = CC_OP_SUB;
1035 gen_op_mov32(dest, src1);
1036 gen_op_add32(dest, dest, src2);
1037 gen_op_update_cc_add(dest, src2);
1038 gen_op_update_xflag_lt(dest, src2);
1039 s->cc_op = CC_OP_ADD;
1042 gen_op_xor32(dest, src1, src2);
1043 gen_logic_cc(s, dest);
1046 gen_op_mov32(dest, src1);
1047 gen_op_sub32(dest, dest, src2);
1048 gen_op_update_cc_add(dest, src2);
1049 s->cc_op = CC_OP_SUB;
1055 gen_ea(s, insn, OS_LONG, dest, &addr);
1063 reg = DREG(insn, 0);
1064 gen_op_bswap32(reg, reg);
1074 switch (insn >> 12) {
1075 case 1: /* move.b */
1078 case 2: /* move.l */
1081 case 3: /* move.w */
1087 src = gen_ea(s, insn, opsize, -1, NULL);
1088 op = (insn >> 6) & 7;
1091 /* The value will already have been sign extended. */
1092 dest = AREG(insn, 9);
1093 gen_op_mov32(dest, src);
1097 dest_ea = ((insn >> 9) & 7) | (op << 3);
1098 gen_ea(s, dest_ea, opsize, src, NULL);
1099 /* This will be correct because loads sign extend. */
1100 gen_logic_cc(s, src);
1111 reg = DREG(insn, 0);
1112 dest = gen_new_qreg(QMODE_I32);
1113 gen_op_mov32 (dest, gen_im32(0));
1114 gen_op_subx_cc(dest, reg);
1116 tmp = gen_new_qreg(QMODE_I32);
1117 gen_op_mov32 (tmp, QREG_CC_DEST);
1118 gen_op_update_cc_add(dest, reg);
1119 gen_op_mov32(reg, dest);
1120 s->cc_op = CC_OP_DYNAMIC;
1122 gen_op_or32(tmp, tmp, gen_im32(~CCF_Z));
1123 gen_op_and32(QREG_CC_DEST, QREG_CC_DEST, tmp);
1124 s->cc_op = CC_OP_FLAGS;
1132 reg = AREG(insn, 9);
1133 tmp = gen_lea(s, insn, OS_LONG);
1134 gen_op_mov32(reg, tmp);
1141 switch ((insn >> 6) & 3) {
1154 gen_ea (s, insn, opsize, gen_im32(0), NULL);
1155 gen_logic_cc(s, gen_im32(0));
1158 DISAS_INSN(move_from_ccr)
1164 dest = gen_new_qreg(QMODE_I32);
1165 gen_op_get_xflag(dest);
1166 gen_op_shl32(dest, dest, gen_im32(4));
1167 gen_op_or32(dest, dest, QREG_CC_DEST);
1168 reg = DREG(insn, 0);
1169 gen_partset_reg(OS_WORD, reg, dest);
1177 reg = DREG(insn, 0);
1178 src1 = gen_new_qreg(QMODE_I32);
1179 gen_op_mov32(src1, reg);
1180 gen_op_neg32(reg, src1);
1181 s->cc_op = CC_OP_SUB;
1182 gen_op_update_cc_add(reg, src1);
1183 gen_op_update_xflag_lt(gen_im32(0), src1);
1184 s->cc_op = CC_OP_SUB;
1187 DISAS_INSN(move_to_ccr)
1192 s->cc_op = CC_OP_FLAGS;
1193 if ((insn & 0x38) == 0)
1195 src1 = gen_new_qreg(QMODE_I32);
1196 reg = DREG(insn, 0);
1197 gen_op_and32(src1, reg, gen_im32(0xf));
1198 gen_op_logic_cc(src1);
1199 gen_op_shr32(src1, reg, gen_im32(4));
1200 gen_op_and32(src1, src1, gen_im32(1));
1201 gen_op_update_xflag_tst(src1);
1203 else if ((insn & 0x3f) != 0x3c)
1208 gen_op_logic_cc(gen_im32(val & 0xf));
1209 gen_op_update_xflag_tst(gen_im32((val & 0x10) >> 4));
1212 disas_undef(s, insn);
1219 reg = DREG(insn, 0);
1220 gen_op_not32(reg, reg);
1221 gen_logic_cc(s, reg);
1231 dest = gen_new_qreg(QMODE_I32);
1232 src1 = gen_new_qreg(QMODE_I32);
1233 src2 = gen_new_qreg(QMODE_I32);
1234 reg = DREG(insn, 0);
1235 gen_op_shl32(src1, reg, gen_im32(16));
1236 gen_op_shr32(src2, reg, gen_im32(16));
1237 gen_op_or32(dest, src1, src2);
1238 gen_op_mov32(reg, dest);
1239 gen_logic_cc(s, dest);
1246 tmp = gen_lea(s, insn, OS_LONG);
1256 reg = DREG(insn, 0);
1257 op = (insn >> 6) & 7;
1258 tmp = gen_new_qreg(QMODE_I32);
1260 gen_op_ext16s32(tmp, reg);
1262 gen_op_ext8s32(tmp, reg);
1264 gen_partset_reg(OS_WORD, reg, tmp);
1266 gen_op_mov32(reg, tmp);
1267 gen_logic_cc(s, tmp);
1275 switch ((insn >> 6) & 3) {
1288 tmp = gen_ea(s, insn, opsize, -1, NULL);
1289 gen_logic_cc(s, tmp);
1294 /* Implemented as a NOP. */
1299 gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
1302 /* ??? This should be atomic. */
1309 dest = gen_new_qreg(QMODE_I32);
1310 src1 = gen_ea(s, insn, OS_BYTE, -1, &addr);
1311 gen_logic_cc(s, src1);
1312 gen_op_or32(dest, src1, gen_im32(0x80));
1313 gen_ea(s, insn, OS_BYTE, dest, &addr);
1323 /* The upper 32 bits of the product are discarded, so
1324 muls.l and mulu.l are functionally equivalent. */
1328 gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
1331 reg = DREG(ext, 12);
1332 src1 = gen_ea(s, insn, OS_LONG, 0, NULL);
1333 dest = gen_new_qreg(QMODE_I32);
1334 gen_op_mul32(dest, src1, reg);
1335 gen_op_mov32(reg, dest);
1336 /* Unlike m68k, coldfire always clears the overflow bit. */
1337 gen_logic_cc(s, dest);
1346 offset = ldsw(s->pc);
1348 reg = AREG(insn, 0);
1349 tmp = gen_new_qreg(QMODE_I32);
1350 gen_op_sub32(tmp, QREG_SP, gen_im32(4));
1351 gen_store(OS_LONG, tmp, reg);
1353 gen_op_mov32(reg, tmp);
1354 gen_op_add32(QREG_SP, tmp, gen_im32(offset));
1363 src = gen_new_qreg(QMODE_I32);
1364 reg = AREG(insn, 0);
1365 gen_op_mov32(src, reg);
1366 tmp = gen_load(OS_LONG, src, 0);
1367 gen_op_mov32(reg, tmp);
1368 gen_op_add32(QREG_SP, src, gen_im32(4));
1379 tmp = gen_load(OS_LONG, QREG_SP, 0);
1380 gen_op_add32(QREG_SP, QREG_SP, gen_im32(4));
1388 /* Load the target address first to ensure correct exception
1390 tmp = gen_lea(s, insn, OS_LONG);
1391 if ((insn & 0x40) == 0) {
1393 gen_push(gen_im32(s->pc));
1406 src1 = gen_ea(s, insn, OS_LONG, 0, &addr);
1407 val = (insn >> 9) & 7;
1410 src2 = gen_im32(val);
1411 dest = gen_new_qreg(QMODE_I32);
1412 gen_op_mov32(dest, src1);
1413 if ((insn & 0x38) == 0x08) {
1414 /* Don't update condition codes if the destination is an
1415 address register. */
1416 if (insn & 0x0100) {
1417 gen_op_sub32(dest, dest, src2);
1419 gen_op_add32(dest, dest, src2);
1422 if (insn & 0x0100) {
1423 gen_op_update_xflag_lt(dest, src2);
1424 gen_op_sub32(dest, dest, src2);
1425 s->cc_op = CC_OP_SUB;
1427 gen_op_add32(dest, dest, src2);
1428 gen_op_update_xflag_lt(dest, src2);
1429 s->cc_op = CC_OP_ADD;
1431 gen_op_update_cc_add(dest, src2);
1433 gen_ea(s, insn, OS_LONG, dest, &addr);
1439 case 2: /* One extension word. */
1442 case 3: /* Two extension words. */
1445 case 4: /* No extension words. */
1448 disas_undef(s, insn);
1460 op = (insn >> 8) & 0xf;
1461 offset = (int8_t)insn;
1463 offset = ldsw(s->pc);
1465 } else if (offset == -1) {
1466 offset = read_im32(s);
1470 gen_push(gen_im32(s->pc));
1475 l1 = gen_new_label();
1476 gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
1477 gen_jmp_tb(s, 1, base + offset);
1479 gen_jmp_tb(s, 0, s->pc);
1481 /* Unconditional branch. */
1482 gen_jmp_tb(s, 0, base + offset);
1490 tmp = gen_im32((int8_t)insn);
1491 gen_op_mov32(DREG(insn, 9), tmp);
1492 gen_logic_cc(s, tmp);
1505 src = gen_ea(s, insn, opsize, (insn & 0x80) ? 0 : -1, NULL);
1506 reg = DREG(insn, 9);
1507 gen_op_mov32(reg, src);
1508 gen_logic_cc(s, src);
1518 reg = DREG(insn, 9);
1519 dest = gen_new_qreg(QMODE_I32);
1521 src = gen_ea(s, insn, OS_LONG, 0, &addr);
1522 gen_op_or32(dest, src, reg);
1523 gen_ea(s, insn, OS_LONG, dest, &addr);
1525 src = gen_ea(s, insn, OS_LONG, 0, NULL);
1526 gen_op_or32(dest, src, reg);
1527 gen_op_mov32(reg, dest);
1529 gen_logic_cc(s, dest);
1537 src = gen_ea(s, insn, OS_LONG, 0, NULL);
1538 reg = AREG(insn, 9);
1539 gen_op_sub32(reg, reg, src);
1550 reg = DREG(insn, 9);
1551 src = DREG(insn, 0);
1552 dest = gen_new_qreg(QMODE_I32);
1553 gen_op_mov32 (dest, reg);
1554 gen_op_subx_cc(dest, src);
1556 tmp = gen_new_qreg(QMODE_I32);
1557 gen_op_mov32 (tmp, QREG_CC_DEST);
1558 gen_op_update_cc_add(dest, src);
1559 gen_op_mov32(reg, dest);
1560 s->cc_op = CC_OP_DYNAMIC;
1562 gen_op_or32(tmp, tmp, gen_im32(~CCF_Z));
1563 gen_op_and32(QREG_CC_DEST, QREG_CC_DEST, tmp);
1564 s->cc_op = CC_OP_FLAGS;
1572 val = (insn >> 9) & 7;
1575 src = gen_im32(val);
1576 gen_logic_cc(s, src);
1577 gen_ea(s, insn, OS_LONG, src, NULL);
1588 op = (insn >> 6) & 3;
1592 s->cc_op = CC_OP_CMPB;
1596 s->cc_op = CC_OP_CMPW;
1600 s->cc_op = CC_OP_SUB;
1605 src = gen_ea(s, insn, opsize, -1, NULL);
1606 reg = DREG(insn, 9);
1607 dest = gen_new_qreg(QMODE_I32);
1608 gen_op_sub32(dest, reg, src);
1609 gen_op_update_cc_add(dest, src);
1624 src = gen_ea(s, insn, opsize, -1, NULL);
1625 reg = AREG(insn, 9);
1626 dest = gen_new_qreg(QMODE_I32);
1627 gen_op_sub32(dest, reg, src);
1628 gen_op_update_cc_add(dest, src);
1629 s->cc_op = CC_OP_SUB;
1639 src = gen_ea(s, insn, OS_LONG, 0, &addr);
1640 reg = DREG(insn, 9);
1641 dest = gen_new_qreg(QMODE_I32);
1642 gen_op_xor32(dest, src, reg);
1643 gen_logic_cc(s, dest);
1644 gen_ea(s, insn, OS_LONG, dest, &addr);
1654 reg = DREG(insn, 9);
1655 dest = gen_new_qreg(QMODE_I32);
1657 src = gen_ea(s, insn, OS_LONG, 0, &addr);
1658 gen_op_and32(dest, src, reg);
1659 gen_ea(s, insn, OS_LONG, dest, &addr);
1661 src = gen_ea(s, insn, OS_LONG, 0, NULL);
1662 gen_op_and32(dest, src, reg);
1663 gen_op_mov32(reg, dest);
1665 gen_logic_cc(s, dest);
1673 src = gen_ea(s, insn, OS_LONG, 0, NULL);
1674 reg = AREG(insn, 9);
1675 gen_op_add32(reg, reg, src);
1686 reg = DREG(insn, 9);
1687 src = DREG(insn, 0);
1688 dest = gen_new_qreg(QMODE_I32);
1689 gen_op_mov32 (dest, reg);
1690 gen_op_addx_cc(dest, src);
1692 tmp = gen_new_qreg(QMODE_I32);
1693 gen_op_mov32 (tmp, QREG_CC_DEST);
1694 gen_op_update_cc_add(dest, src);
1695 gen_op_mov32(reg, dest);
1696 s->cc_op = CC_OP_DYNAMIC;
1698 gen_op_or32(tmp, tmp, gen_im32(~CCF_Z));
1699 gen_op_and32(QREG_CC_DEST, QREG_CC_DEST, tmp);
1700 s->cc_op = CC_OP_FLAGS;
1703 DISAS_INSN(shift_im)
1708 reg = DREG(insn, 0);
1709 tmp = (insn >> 9) & 7;
1713 gen_op_shl_im_cc(reg, tmp);
1714 s->cc_op = CC_OP_SHL;
1717 gen_op_shr_im_cc(reg, tmp);
1718 s->cc_op = CC_OP_SHR;
1720 gen_op_sar_im_cc(reg, tmp);
1721 s->cc_op = CC_OP_SAR;
1726 DISAS_INSN(shift_reg)
1732 reg = DREG(insn, 0);
1733 src = DREG(insn, 9);
1734 tmp = gen_new_qreg(QMODE_I32);
1735 gen_op_and32(tmp, src, gen_im32(63));
1737 gen_op_shl_cc(reg, tmp);
1738 s->cc_op = CC_OP_SHL;
1741 gen_op_shr_cc(reg, tmp);
1742 s->cc_op = CC_OP_SHR;
1744 gen_op_sar_cc(reg, tmp);
1745 s->cc_op = CC_OP_SAR;
1752 cpu_abort(NULL, "Unimplemented insn: ff1");
1764 gen_exception(s, addr, EXCP_UNSUPPORTED);
1766 gen_exception(s, addr, EXCP_PRIVILEGE);
1769 DISAS_INSN(move_from_sr)
1771 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
1774 DISAS_INSN(move_to_sr)
1776 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
1779 DISAS_INSN(move_from_usp)
1781 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
1784 DISAS_INSN(move_to_usp)
1786 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
1791 gen_exception(s, s->pc, EXCP_HLT);
1796 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
1801 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
1806 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
1811 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
1816 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
1821 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
1826 gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
1831 gen_exception(s, s->pc - 2, EXCP_TRAP0 + (insn & 0xf));
1834 /* ??? FP exceptions are not implemented. Most exceptions are deferred until
1835 immediately before the next FP instruction is executed. */
1848 opmode = ext & 0x7f;
1849 switch ((ext >> 13) & 7) {
1854 case 3: /* fmove out */
1857 /* ??? TODO: Proper behavior on overflow. */
1858 switch ((ext >> 10) & 7) {
1861 res = gen_new_qreg(QMODE_I32);
1862 gen_op_f64_to_i32(res, src);
1866 res = gen_new_qreg(QMODE_F32);
1867 gen_op_f64_to_f32(res, src);
1871 res = gen_new_qreg(QMODE_I32);
1872 gen_op_f64_to_i32(res, src);
1880 res = gen_new_qreg(QMODE_I32);
1881 gen_op_f64_to_i32(res, src);
1886 gen_ea(s, insn, opsize, res, NULL);
1888 case 4: /* fmove to control register. */
1889 switch ((ext >> 10) & 7) {
1891 /* Not implemented. Ignore writes. */
1896 cpu_abort(NULL, "Unimplemented: fmove to control %d",
1900 case 5: /* fmove from control register. */
1901 switch ((ext >> 10) & 7) {
1903 /* Not implemented. Always return zero. */
1909 cpu_abort(NULL, "Unimplemented: fmove from control %d",
1913 gen_ea(s, insn, OS_LONG, res, NULL);
1915 case 6: /* fmovem */
1920 if ((ext & 0x1f00) != 0x1000 || (ext & 0xff) == 0)
1922 src = gen_lea(s, insn, OS_LONG);
1923 addr = gen_new_qreg(QMODE_I32);
1924 gen_op_mov32(addr, src);
1929 if (ext & (1 << 13)) {
1931 gen_op_stf64(addr, dest);
1934 gen_op_ldf64(dest, addr);
1936 if (ext & (mask - 1))
1937 gen_op_add32(addr, addr, gen_im32(8));
1945 if (ext & (1 << 14)) {
1948 /* Source effective address. */
1949 switch ((ext >> 10) & 7) {
1950 case 0: opsize = OS_LONG; break;
1951 case 1: opsize = OS_SINGLE; break;
1952 case 4: opsize = OS_WORD; break;
1953 case 5: opsize = OS_DOUBLE; break;
1954 case 6: opsize = OS_BYTE; break;
1958 tmp = gen_ea(s, insn, opsize, -1, NULL);
1959 if (opsize == OS_DOUBLE) {
1962 src = gen_new_qreg(QMODE_F64);
1967 gen_op_i32_to_f64(src, tmp);
1970 gen_op_f32_to_f64(src, tmp);
1975 /* Source register. */
1976 src = FREG(ext, 10);
1978 dest = FREG(ext, 7);
1979 res = gen_new_qreg(QMODE_F64);
1981 gen_op_movf64(res, dest);
1984 case 0: case 0x40: case 0x44: /* fmove */
1985 gen_op_movf64(res, src);
1988 gen_op_iround_f64(res, src);
1991 case 3: /* fintrz */
1992 gen_op_itrunc_f64(res, src);
1995 case 4: case 0x41: case 0x45: /* fsqrt */
1996 gen_op_sqrtf64(res, src);
1998 case 0x18: case 0x58: case 0x5c: /* fabs */
1999 gen_op_absf64(res, src);
2001 case 0x1a: case 0x5a: case 0x5e: /* fneg */
2002 gen_op_chsf64(res, src);
2004 case 0x20: case 0x60: case 0x64: /* fdiv */
2005 gen_op_divf64(res, res, src);
2007 case 0x22: case 0x62: case 0x66: /* fadd */
2008 gen_op_addf64(res, res, src);
2010 case 0x23: case 0x63: case 0x67: /* fmul */
2011 gen_op_mulf64(res, res, src);
2013 case 0x28: case 0x68: case 0x6c: /* fsub */
2014 gen_op_subf64(res, res, src);
2016 case 0x38: /* fcmp */
2017 gen_op_sub_cmpf64(res, res, src);
2021 case 0x3a: /* ftst */
2022 gen_op_movf64(res, src);
2030 if (opmode & 0x40) {
2031 if ((opmode & 0x4) != 0)
2033 } else if ((s->fpcr & M68K_FPCR_PREC) == 0) {
2040 tmp = gen_new_qreg(QMODE_F32);
2041 gen_op_f64_to_f32(tmp, res);
2042 gen_op_f32_to_f64(res, tmp);
2044 gen_op_fp_result(res);
2046 gen_op_movf64(dest, res);
2051 disas_undef_fpu(s, insn);
/* ColdFire FBcc: floating-point conditional branch.
 * A quiet compare of QREG_FP_RESULT against zero yields `flag` in
 * {-1, 0, 1, 2}; the per-case comments below show the mapping
 * (olt = -1, eq = 0, ogt = 1, "=2" for unordered), consistent with
 * softfloat's float_relation_* encoding.  Each predicate is then reduced
 * to sign/zero tests (optionally after subtracting a constant) on `flag`.
 * NOTE(review): this listing is elided -- the DISAS_INSN(fbcc) header,
 * local declarations, `break`s and some case labels are not shown. */
2063 offset = ldsw(s->pc);
2065 if (insn & (1 << 6)) {
/* Bit 6 selects a 32-bit displacement: fold in the low word. */
2066 offset = (offset << 16) | lduw(s->pc);
2070 l1 = gen_new_label();
2071 /* TODO: Raise BSUN exception. */
2072 flag = gen_new_qreg(QMODE_I32);
2073 zero = gen_new_qreg(QMODE_F64);
2074 gen_op_zerof64(zero);
2075 gen_op_compare_quietf64(flag, QREG_FP_RESULT, zero);
2076 /* Jump to l1 if condition is true. */
2077 switch (insn & 0xf) {
2080 case 1: /* eq (=0) */
2081 gen_op_jmp_z32(flag, l1);
2083 case 2: /* ogt (=1) */
2084 gen_op_sub32(flag, flag, gen_im32(1));
2085 gen_op_jmp_z32(flag, l1);
2087 case 3: /* oge (=0 or =1) */
2088 gen_op_jmp_z32(flag, l1);
2089 gen_op_sub32(flag, flag, gen_im32(1));
2090 gen_op_jmp_z32(flag, l1);
2092 case 4: /* olt (=-1) */
2093 gen_op_jmp_s32(flag, l1);
2095 case 5: /* ole (=-1 or =0) */
2096 gen_op_jmp_s32(flag, l1);
2097 gen_op_jmp_z32(flag, l1);
2099 case 6: /* ogl (=-1 or =1) */
2100 gen_op_jmp_s32(flag, l1);
2101 gen_op_sub32(flag, flag, gen_im32(1));
2102 gen_op_jmp_z32(flag, l1);
/* NOTE(review): if flag == 2 means "unordered" (as the eq/ogt/olt cases
 * imply), then case 7 "or" (ordered) branches exactly when the compare is
 * UNordered, and case 8 "un" branches when it is ordered -- the two bodies
 * look swapped.  Verify against the FBcc predicate table before changing. */
2104 case 7: /* or (=2) */
2105 gen_op_sub32(flag, flag, gen_im32(2));
2106 gen_op_jmp_z32(flag, l1);
2108 case 8: /* un (<2) */
2109 gen_op_sub32(flag, flag, gen_im32(2));
2110 gen_op_jmp_s32(flag, l1);
2112 case 9: /* ueq (=0 or =2) */
2113 gen_op_jmp_z32(flag, l1);
2114 gen_op_sub32(flag, flag, gen_im32(2));
2115 gen_op_jmp_z32(flag, l1);
2117 case 10: /* ugt (>0) */
2118 /* ??? Add jmp_gtu. */
2119 gen_op_sub32(flag, flag, gen_im32(1));
2120 gen_op_jmp_ns32(flag, l1);
2122 case 11: /* uge (>=0) */
2123 gen_op_jmp_ns32(flag, l1);
2125 case 12: /* ult (=-1 or =2) */
2126 gen_op_jmp_s32(flag, l1);
2127 gen_op_sub32(flag, flag, gen_im32(2));
2128 gen_op_jmp_z32(flag, l1);
2130 case 13: /* ule (!=1) */
2131 gen_op_sub32(flag, flag, gen_im32(1));
2132 gen_op_jmp_nz32(flag, l1);
2134 case 14: /* ne (!=0) */
2135 gen_op_jmp_nz32(flag, l1);
/* Presumably the always-true predicate (case label elided): force a
 * nonzero flag so the following (elided) jump is always taken -- TODO
 * confirm against the full source. */
2138 gen_op_mov32(flag, gen_im32(1));
/* Fall-through path: branch not taken continues at the next insn (tb 0);
 * the taken path (label l1, elided) goes to addr + offset (tb 1). */
2141 gen_jmp_tb(s, 0, s->pc);
2143 gen_jmp_tb(s, 1, addr + offset);
/* One handler slot per 16-bit opcode pattern; filled by register_opcode(). */
2146 static disas_proc opcode_table[65536];
/* Install `proc` for every 16-bit pattern i with (i & mask) == opcode.
 * Only the bits below the first clear mask bit are enumerated; the final
 * per-pattern test filters out combinations excluded by higher mask bits. */
2149 register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
2155 /* Sanity check. All set bits must be included in the mask. */
2158 /* This could probably be cleverer. For now just optimize the case where
2159 the top bits are known. */
2160 /* Find the first zero bit in the mask. */
2162 while ((i & mask) != 0)
/* `i` ends up as the lowest clear bit of mask (its initialisation and the
 * shift inside the loop are elided in this listing -- presumably i <<= 1
 * starting from 1). */
2164 /* Iterate over all combinations of this and lower bits. */
2169 from = opcode & ~(i - 1);
2171 for (i = from; i < to; i++)
2173 if ((i & mask) == opcode)
2174 opcode_table[i] = proc;
2178 /* Register m68k opcode handlers. Order is important.
2179 Later insns override earlier ones. */
2181 register_m68k_insns (m68k_def_t *def)
/* Build opcode_table for the CPU described by `def`: each INSN() line
 * registers disas_<name> for pattern 0x<opcode> under 0x<mask>, but only
 * when the CPU's insn-set flags include the given ISA (CF_A/B/C, MAC,
 * EMAC, FPU -- see the M68K_INSN_* bits).  Because later registrations
 * overwrite earlier ones, broad patterns (e.g. undef, undef_fpu, move)
 * are listed first and more specific encodings (e.g. scc over addsubq,
 * tpf over scc, fpu/fbcc over undef_fpu) later. */
2185 iflags = def->insns;
2186 #define INSN(name, opcode, mask, isa) \
2187 if (iflags & M68K_INSN_##isa) \
2188 register_opcode(disas_##name, 0x##opcode, 0x##mask)
2189 INSN(undef, 0000, 0000, CF_A);
2190 INSN(arith_im, 0080, fff8, CF_A);
2191 INSN(bitrev, 00c0, fff8, CF_C);
2192 INSN(bitop_reg, 0100, f1c0, CF_A);
2193 INSN(bitop_reg, 0140, f1c0, CF_A);
2194 INSN(bitop_reg, 0180, f1c0, CF_A);
2195 INSN(bitop_reg, 01c0, f1c0, CF_A);
2196 INSN(arith_im, 0280, fff8, CF_A);
2197 INSN(byterev, 02c0, fff8, CF_A);
2198 INSN(arith_im, 0480, fff8, CF_A);
2199 INSN(ff1, 04c0, fff8, CF_C);
2200 INSN(arith_im, 0680, fff8, CF_A);
2201 INSN(bitop_im, 0800, ffc0, CF_A);
2202 INSN(bitop_im, 0840, ffc0, CF_A);
2203 INSN(bitop_im, 0880, ffc0, CF_A);
2204 INSN(bitop_im, 08c0, ffc0, CF_A);
2205 INSN(arith_im, 0a80, fff8, CF_A);
2206 INSN(arith_im, 0c00, ff38, CF_A);
2207 INSN(move, 1000, f000, CF_A);
2208 INSN(move, 2000, f000, CF_A);
2209 INSN(move, 3000, f000, CF_A);
2210 INSN(strldsr, 40e7, ffff, CF_A);
2211 INSN(negx, 4080, fff8, CF_A);
2212 INSN(move_from_sr, 40c0, fff8, CF_A);
2213 INSN(lea, 41c0, f1c0, CF_A);
2214 INSN(clr, 4200, ff00, CF_A);
/* 42c0 block: generic undef first, then the valid move-from-ccr forms
 * override the data-register subset. */
2215 INSN(undef, 42c0, ffc0, CF_A);
2216 INSN(move_from_ccr, 42c0, fff8, CF_A);
2217 INSN(neg, 4480, fff8, CF_A);
2218 INSN(move_to_ccr, 44c0, ffc0, CF_A);
2219 INSN(not, 4680, fff8, CF_A);
2220 INSN(move_to_sr, 46c0, ffc0, CF_A);
2221 INSN(pea, 4840, ffc0, CF_A);
2222 INSN(swap, 4840, fff8, CF_A);
2223 INSN(movem, 48c0, fbc0, CF_A);
2224 INSN(ext, 4880, fff8, CF_A);
2225 INSN(ext, 48c0, fff8, CF_A);
2226 INSN(ext, 49c0, fff8, CF_A);
2227 INSN(tst, 4a00, ff00, CF_A);
2228 INSN(tas, 4ac0, ffc0, CF_B);
2229 INSN(halt, 4ac8, ffff, CF_A);
2230 INSN(pulse, 4acc, ffff, CF_A);
2231 INSN(illegal, 4afc, ffff, CF_A);
2232 INSN(mull, 4c00, ffc0, CF_A);
2233 INSN(divl, 4c40, ffc0, CF_A);
2234 INSN(sats, 4c80, fff8, CF_B);
2235 INSN(trap, 4e40, fff0, CF_A);
2236 INSN(link, 4e50, fff8, CF_A);
2237 INSN(unlk, 4e58, fff8, CF_A);
2238 INSN(move_to_usp, 4e60, fff8, CF_B);
2239 INSN(move_from_usp, 4e68, fff8, CF_B);
2240 INSN(nop, 4e71, ffff, CF_A);
2241 INSN(stop, 4e72, ffff, CF_A);
2242 INSN(rte, 4e73, ffff, CF_A);
2243 INSN(rts, 4e75, ffff, CF_A);
2244 INSN(movec, 4e7b, ffff, CF_A);
2245 INSN(jump, 4e80, ffc0, CF_A);
2246 INSN(jump, 4ec0, ffc0, CF_A);
/* 5xxx block: scc overrides the matching addsubq encodings, tpf
 * overrides scc -- ordering is load-bearing here. */
2247 INSN(addsubq, 5180, f1c0, CF_A);
2248 INSN(scc, 50c0, f0f8, CF_A);
2249 INSN(addsubq, 5080, f1c0, CF_A);
2250 INSN(tpf, 51f8, fff8, CF_A);
2251 INSN(branch, 6000, f000, CF_A);
2252 INSN(moveq, 7000, f100, CF_A);
2253 INSN(mvzs, 7100, f100, CF_B);
2254 INSN(or, 8000, f000, CF_A);
2255 INSN(divw, 80c0, f0c0, CF_A);
2256 INSN(addsub, 9000, f000, CF_A);
2257 INSN(subx, 9180, f1f8, CF_A);
2258 INSN(suba, 91c0, f1c0, CF_A);
2259 INSN(undef_mac, a000, f000, CF_A);
2260 INSN(mov3q, a140, f1c0, CF_B);
2261 INSN(cmp, b000, f1c0, CF_B); /* cmp.b */
2262 INSN(cmp, b040, f1c0, CF_B); /* cmp.w */
2263 INSN(cmpa, b0c0, f1c0, CF_B); /* cmpa.w */
2264 INSN(cmp, b080, f1c0, CF_A);
2265 INSN(cmpa, b1c0, f1c0, CF_A);
2266 INSN(eor, b180, f1c0, CF_A);
2267 INSN(and, c000, f000, CF_A);
2268 INSN(mulw, c0c0, f0c0, CF_A);
2269 INSN(addsub, d000, f000, CF_A);
2270 INSN(addx, d180, f1f8, CF_A);
2271 INSN(adda, d1c0, f1c0, CF_A);
2272 INSN(shift_im, e080, f0f0, CF_A);
2273 INSN(shift_reg, e0a0, f0f0, CF_A);
/* Fxxx block: everything defaults to undef_fpu; real FPU encodings are
 * registered afterwards only when the CPU has CF_FPU. */
2274 INSN(undef_fpu, f000, f000, CF_A);
2275 INSN(fpu, f200, ffc0, CF_FPU);
2276 INSN(fbcc, f280, ffc0, CF_FPU);
2277 INSN(intouch, f340, ffc0, CF_A);
2278 INSN(cpushl, f428, ff38, CF_A);
2279 INSN(wddata, fb00, ff00, CF_A);
2280 INSN(wdebug, fbc0, ffc0, CF_A);
2284 /* ??? Some of this implementation is not exception safe. We should always
2285 write back the result to memory before setting the condition codes. */
/* Fetch one 16-bit opcode and dispatch to its handler via opcode_table
 * (the fetch itself is elided in this listing). */
2286 static void disas_m68k_insn(CPUState * env, DisasContext *s)
2293 opcode_table[insn](s, insn);
2297 /* Save the result of a floating point operation. */
/* Target-qop expander: copy the op's single f64 argument into the
 * dedicated QREG_FP_RESULT register (used later by fbcc). */
2298 static void expand_op_fp_result(qOP *qop)
2300 gen_op_movf64(QREG_FP_RESULT, qop->args[0]);
2303 /* Dummy op to indicate that the flags have been set. */
/* Intentionally generates no code; the op exists only as a marker. */
2304 static void expand_op_flags_set(qOP *qop)
2308 /* Convert the condition codes into CC_OP_FLAGS format. */
/* args[0] names the cc_op to flush; CC_OP_DYNAMIC means the op is only
 * known at runtime, so pass the live QREG_CC_OP register to the helper
 * instead of an immediate. */
2309 static void expand_op_flush_flags(qOP *qop)
2313 if (qop->args[0] == CC_OP_DYNAMIC)
2314 cc_opreg = QREG_CC_OP;
2316 cc_opreg = gen_im32(qop->args[0]);
2317 gen_op_helper32(QREG_NULL, cc_opreg, HELPER_flush_flags);
2320 /* Set CC_DEST after a logical or direct flag setting operation. */
/* Logical ops derive N/Z from the result alone, so only CC_DEST is set. */
2321 static void expand_op_logic_cc(qOP *qop)
2323 gen_op_mov32(QREG_CC_DEST, qop->args[0]);
2326 /* Set CC_SRC and CC_DEST after an arithmetic operation. */
/* Arithmetic flags need both the result (args[0]) and one operand
 * (args[1]) to reconstruct C/V lazily. */
2327 static void expand_op_update_cc_add(qOP *qop)
2329 gen_op_mov32(QREG_CC_DEST, qop->args[0]);
2330 gen_op_mov32(QREG_CC_SRC, qop->args[1]);
2333 /* Update the X flag. */
/* Two forms: with a single argument, copy it into CC_X directly; with
 * two, set CC_X to the unsigned-carry test arg0 < arg1. */
2334 static void expand_op_update_xflag(qOP *qop)
2339 arg0 = qop->args[0];
2340 arg1 = qop->args[1];
2341 if (arg1 == QREG_NULL) {
2343 gen_op_mov32(QREG_CC_X, arg0);
2345 /* CC_X = arg0 < (unsigned)arg1. */
2346 gen_op_set_ltu32(QREG_CC_X, arg0, arg1);
2350 /* Set arg0 to the contents of the X flag. */
2351 static void expand_op_get_xflag(qOP *qop)
2353 gen_op_mov32(qop->args[0], QREG_CC_X);
2356 /* Expand a shift by immediate. The ISA only allows shifts by 1-8, so we
2357 already know the shift is within range. */
/* `right` selects shift direction, `arith` selects sar vs shr.  The
 * pre-shift value is snapshotted in `val` so the flags (CC_DEST/CC_SRC)
 * and the X flag (last bit shifted out) can be computed from it even
 * though `reg` is overwritten with the shifted result.
 * NOTE(review): several lines (reg/tmp/im setup, braces) are elided. */
2358 static inline void expand_shift_im(qOP *qop, int right, int arith)
2368 val = gen_new_qreg(QMODE_I32);
2369 gen_op_mov32(val, reg);
2370 gen_op_mov32(QREG_CC_DEST, val);
2371 gen_op_mov32(QREG_CC_SRC, tmp);
2374 gen_op_sar32(reg, val, tmp);
2376 gen_op_shr32(reg, val, tmp);
/* Right shift: the last bit out is bit (im - 1) of the old value. */
2381 tmp = gen_im32(im - 1);
2383 gen_op_shl32(reg, val, tmp);
/* Left shift: the last bit out is bit (32 - im) of the old value. */
2384 tmp = gen_im32(32 - im);
2386 if (tmp != QREG_NULL)
2387 gen_op_shr32(val, val, tmp);
2388 gen_op_and32(QREG_CC_X, val, gen_im32(1));
/* Thin wrappers binding the (right, arith) parameters of expand_shift_im
 * for the three immediate-shift target qops. */
2391 static void expand_op_shl_im_cc(qOP *qop)
2393 expand_shift_im(qop, 0, 0);
2396 static void expand_op_shr_im_cc(qOP *qop)
2398 expand_shift_im(qop, 1, 0);
2401 static void expand_op_sar_im_cc(qOP *qop)
2403 expand_shift_im(qop, 1, 1);
2406 /* Expand a shift by register. */
2407 /* ??? This gives incorrect answers for shifts by 0 or >= 32 */
/* Register-count variant of expand_shift_im: same snapshot-then-shift
 * scheme, but the X-flag bit position depends on the runtime count, so
 * it is computed into `tmp` (count-1 for right shifts, 31-count for
 * left) before extracting the last bit shifted out.
 * NOTE(review): reg setup, braces and some statements are elided. */
2408 static inline void expand_shift_reg(qOP *qop, int right, int arith)
2416 shift = qop->args[1];
2417 val = gen_new_qreg(QMODE_I32);
2418 gen_op_mov32(val, reg);
2419 gen_op_mov32(QREG_CC_DEST, val);
2420 gen_op_mov32(QREG_CC_SRC, shift);
2421 tmp = gen_new_qreg(QMODE_I32);
2424 gen_op_sar32(reg, val, shift);
2426 gen_op_shr32(reg, val, shift);
2428 gen_op_sub32(tmp, shift, gen_im32(1));
2430 gen_op_shl32(reg, val, shift);
2431 gen_op_sub32(tmp, gen_im32(31), shift);
2433 gen_op_shl32(val, val, tmp);
2434 gen_op_and32(QREG_CC_X, val, gen_im32(1));
/* Thin wrappers binding (right, arith) for the register-count shifts. */
2437 static void expand_op_shl_cc(qOP *qop)
2439 expand_shift_reg(qop, 0, 0);
2442 static void expand_op_shr_cc(qOP *qop)
2444 expand_shift_reg(qop, 1, 0);
2447 static void expand_op_sar_cc(qOP *qop)
2449 expand_shift_reg(qop, 1, 1);
2452 /* Set the Z flag to (arg0 & arg1) == 0. */
/* Clears CCF_Z in CC_DEST unconditionally, then sets it again unless the
 * AND result was nonzero (jump over the OR when tmp != 0).  Other flag
 * bits in CC_DEST are preserved. */
2453 static void expand_op_btest(qOP *qop)
2458 l1 = gen_new_label();
2459 tmp = gen_new_qreg(QMODE_I32);
2460 gen_op_and32(tmp, qop->args[0], qop->args[1]);
2461 gen_op_and32(QREG_CC_DEST, QREG_CC_DEST, gen_im32(~(uint32_t)CCF_Z));
2462 gen_op_jmp_nz32(tmp, l1);
2463 gen_op_or32(QREG_CC_DEST, QREG_CC_DEST, gen_im32(CCF_Z));
2467 /* arg0 += arg1 + CC_X */
/* Two code paths selected at runtime on CC_X:
 *   X set:   result = arg0 + arg1 + 1; carry-out when result <= arg1
 *            (unsigned), cc_op becomes CC_OP_ADDX;
 *   X clear: result = arg0 + arg1; carry-out when result < arg1,
 *            cc_op becomes CC_OP_ADD.
 * NOTE(review): label placement/jumps between the paths are elided. */
2468 static void expand_op_addx_cc(qOP *qop)
2470 int arg0 = qop->args[0];
2471 int arg1 = qop->args[1];
2474 gen_op_add32 (arg0, arg0, arg1);
2475 l1 = gen_new_label();
2476 l2 = gen_new_label();
2477 gen_op_jmp_z32(QREG_CC_X, l1);
2478 gen_op_add32(arg0, arg0, gen_im32(1));
2479 gen_op_mov32(QREG_CC_OP, gen_im32(CC_OP_ADDX));
2480 gen_op_set_leu32(QREG_CC_X, arg0, arg1);
2483 gen_op_mov32(QREG_CC_OP, gen_im32(CC_OP_ADD));
2484 gen_op_set_ltu32(QREG_CC_X, arg0, arg1);
2488 /* arg0 -= arg1 + CC_X */
/* Mirror of expand_op_addx_cc for subtraction.  Note the borrow test is
 * taken BEFORE the extra decrement on the X-set path (leu on the old
 * value) but after the plain subtract on the X-clear path (ltu).
 * NOTE(review): label placement/jumps between the paths are elided. */
2489 static void expand_op_subx_cc(qOP *qop)
2491 int arg0 = qop->args[0];
2492 int arg1 = qop->args[1];
2495 l1 = gen_new_label();
2496 l2 = gen_new_label();
2497 gen_op_jmp_z32(QREG_CC_X, l1);
2498 gen_op_set_leu32(QREG_CC_X, arg0, arg1);
2499 gen_op_sub32(arg0, arg0, gen_im32(1));
2500 gen_op_mov32(QREG_CC_OP, gen_im32(CC_OP_SUBX));
2503 gen_op_set_ltu32(QREG_CC_X, arg0, arg1);
2504 gen_op_mov32(QREG_CC_OP, gen_im32(CC_OP_SUB));
2506 gen_op_sub32 (arg0, arg0, arg1);
2509 /* Expand target specific ops to generic qops. */
/* Walk the generated qop list once, rebuilding it from scratch: ops below
 * FIRST_TARGET_OP are relinked unchanged; target-specific ops are replaced
 * by calling their expand_op_<name>() expander (which appends generic ops
 * to the new list).  The DEF/#include pair generates one switch case per
 * entry in qop-target.def; anything unmatched aborts. */
2510 static void expand_target_qops(void)
2516 /* Copy the list of qops, expanding target specific ops as we go. */
2517 qop = gen_first_qop;
2518 gen_first_qop = NULL;
2519 gen_last_qop = NULL;
2520 for (; qop; qop = next) {
2523 if (c < FIRST_TARGET_OP) {
/* Generic op: splice it onto the tail of the rebuilt list. */
2524 qop->prev = gen_last_qop;
2527 gen_last_qop->next = qop;
2529 gen_first_qop = qop;
2534 #define DEF(name, nargs, barrier) \
2535 case INDEX_op_##name: \
2536 expand_op_##name(qop); \
2538 #include "qop-target.def"
2541 cpu_abort(NULL, "Unexpanded target qop");
2546 /* ??? Implement this. */
/* Placeholder for a pass that would eliminate redundant lazy-flag
 * computations; currently a stub. */
2548 optimize_flags(void)
2553 /* generate intermediate code for basic block 'tb'. */
/* Core translation loop: decodes guest insns starting at tb->pc into the
 * micro-op buffers until a jump, the op buffer fills, single-step is
 * requested, or the block nears a page boundary.  The `search_pc`-style
 * bookkeeping (gen_opc_pc/gen_opc_instr_start) maps op indices back to
 * guest PCs for exception restart.
 * NOTE(review): many lines (search-pc mode flag, loop header, returns,
 * braces) are elided in this listing. */
2554 int gen_intermediate_code_internal(CPUState *env, TranslationBlock *tb,
2557 DisasContext dc1, *dc = &dc1;
2558 uint16_t *gen_opc_end;
2560 target_ulong pc_start;
2564 /* generate intermediate code */
/* Reset the shared micro-op output buffers. */
2569 gen_opc_ptr = gen_opc_buf;
2570 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
2571 gen_opparam_ptr = gen_opparam_buf;
/* Initialise the per-block disassembly context; cc state is unknown at
 * block entry, hence CC_OP_DYNAMIC. */
2573 dc->is_jmp = DISAS_NEXT;
2575 dc->cc_op = CC_OP_DYNAMIC;
2576 dc->singlestep_enabled = env->singlestep_enabled;
2577 dc->fpcr = env->fpcr;
2582 pc_offset = dc->pc - pc_start;
2583 gen_throws_exception = NULL;
/* Stop and raise EXCP_DEBUG when translation reaches a breakpoint. */
2584 if (env->nb_breakpoints > 0) {
2585 for(j = 0; j < env->nb_breakpoints; j++) {
2586 if (env->breakpoints[j] == dc->pc) {
2587 gen_exception(dc, dc->pc, EXCP_DEBUG);
2588 dc->is_jmp = DISAS_JUMP;
/* Record op-index -> guest-PC mapping for this insn (search-pc mode). */
2596 j = gen_opc_ptr - gen_opc_buf;
2600 gen_opc_instr_start[lj++] = 0;
2602 gen_opc_pc[lj] = dc->pc;
2603 gen_opc_instr_start[lj] = 1;
2605 last_cc_op = dc->cc_op;
2606 disas_m68k_insn(env, dc);
/* Loop termination: explicit jump, op buffer full, single-step, or the
 * block would cross (near) a page boundary. */
2607 } while (!dc->is_jmp && gen_opc_ptr < gen_opc_end &&
2608 !env->singlestep_enabled &&
2609 (pc_offset) < (TARGET_PAGE_SIZE - 32));
2611 if (__builtin_expect(env->singlestep_enabled, 0)) {
2612 /* Make sure the pc is updated, and raise a debug exception. */
2614 gen_flush_cc_op(dc);
2615 gen_op_mov32(QREG_PC, gen_im32((long)dc->pc));
2617 gen_op_raise_exception(EXCP_DEBUG);
/* Block epilogue: materialise lazy flags, then emit the appropriate
 * TB exit for how the block ended. */
2619 switch(dc->is_jmp) {
2621 gen_flush_cc_op(dc);
2622 gen_jmp_tb(dc, 0, dc->pc);
2627 gen_flush_cc_op(dc);
2628 /* indicate that the hash table must be used to find the next TB */
2629 gen_op_mov32(QREG_T0, gen_im32(0));
2633 /* nothing more to generate */
2637 *gen_opc_ptr = INDEX_op_end;
/* Optional tracing of guest asm and generated ops. */
2640 if (loglevel & CPU_LOG_TB_IN_ASM) {
2641 fprintf(logfile, "----------------\n");
2642 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
2643 target_disas(logfile, pc_start, dc->pc - pc_start, 0);
2644 fprintf(logfile, "\n");
2645 if (loglevel & (CPU_LOG_TB_OP)) {
2646 fprintf(logfile, "OP:\n");
2647 dump_ops(gen_opc_buf, gen_opparam_buf);
2648 fprintf(logfile, "\n");
/* Pad the instr_start table out to the final op index. */
2653 j = gen_opc_ptr - gen_opc_buf;
2656 gen_opc_instr_start[lj++] = 0;
2659 tb->size = dc->pc - pc_start;
2663 //expand_target_qops();
/* Public entry points: translate a TB normally (search_pc = 0) or in
 * PC-search mode (search_pc = 1, used for exception restart). */
2667 int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
2669 return gen_intermediate_code_internal(env, tb, 0);
2672 int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
2674 return gen_intermediate_code_internal(env, tb, 1);
/* Allocate and zero-initialise a CPU state structure and make it the
 * current cpu_single_env.  The lazy cc machinery starts in the fully
 * materialised CC_OP_FLAGS state.
 * NOTE(review): the lines between malloc and memset are elided --
 * presumably they include the NULL check on malloc; verify. */
2677 CPUM68KState *cpu_m68k_init(void)
2681 env = malloc(sizeof(CPUM68KState));
2686 memset(env, 0, sizeof(CPUM68KState));
2687 /* ??? FP regs should be initialized to NaN. */
2688 cpu_single_env = env;
2689 env->cc_op = CC_OP_FLAGS;
/* Release a CPU state created by cpu_m68k_init (body elided here). */
2693 void cpu_m68k_close(CPUM68KState *env)
/* Linear search of the m68k_cpu_defs table by model name; the loop
 * header and the return statements are elided in this listing. */
2698 m68k_def_t *m68k_find_by_name(const char *name)
2702 def = m68k_cpu_defs;
2705 if (strcmp(def->name, name) == 0)
/* Bind a CPU model definition to `env`: populates opcode_table with the
 * handlers enabled by def->insns. */
2712 void cpu_m68k_register(CPUM68KState *env, m68k_def_t *def)
2714 register_m68k_insns(def);
/* Dump architectural state for debugging: D0-D7/A0-A7 alongside the FP
 * registers (shown both as raw 64-bit halves via the union `u` and as a
 * double), then PC, SR flags and the latched FP compare result. */
2717 void cpu_dump_state(CPUState *env, FILE *f,
2718 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
2724 for (i = 0; i < 8; i++)
/* `u` is a union overlaying a double with two 32-bit words (declaration
 * elided); used here to print the raw bit pattern of fregs[i]. */
2726 u.d = env->fregs[i];
2727 cpu_fprintf (f, "D%d = %08x A%d = %08x F%d = %08x%08x (%12g)\n",
2728 i, env->dregs[i], i, env->aregs[i],
2729 i, u.l.upper, u.l.lower, u.d);
2731 cpu_fprintf (f, "PC = %08x ", env->pc);
/* 0x10 is the X (extend) bit; the others use the CCF_* masks. */
2733 cpu_fprintf (f, "SR = %04x %c%c%c%c%c ", sr, (sr & 0x10) ? 'X' : '-',
2734 (sr & CCF_N) ? 'N' : '-', (sr & CCF_Z) ? 'Z' : '-',
2735 (sr & CCF_V) ? 'V' : '-', (sr & CCF_C) ? 'C' : '-');
2736 cpu_fprintf (f, "FPRESULT = %12g\n", env->fp_result);
/* Debug translation of a guest virtual page address to physical (body
 * elided; with no MMU this is presumably the identity mapping -- TODO
 * confirm). */
2740 target_ulong cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
2745 #if defined(CONFIG_USER_ONLY)
/* User-mode emulation: every MMU fault becomes an EXCP_ACCESS exception
 * with the faulting address latched in mmu.ar for the signal handler. */
2747 int cpu_m68k_handle_mmu_fault (CPUState *env, target_ulong address, int rw,
2748 int is_user, int is_softmmu)
2750 env->exception_index = EXCP_ACCESS;
2751 env->mmu.ar = address;
/* System-mode MMU handling has not been written yet. */
2757 #error not implemented