2 * PowerPC emulation for qemu: main translation routines.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
30 #include "qemu-common.h"
/* Single-stepping trigger bits tested by the translation loop */
#define CPU_SINGLE_STEP 0x1
#define CPU_BRANCH_STEP 0x2
#define GDBSTUB_SINGLE_STEP 0x4

/* Include definitions for instructions classes and implementations flags */
/* Compile-time debug/instrumentation switches — all off by default */
//#define DO_SINGLE_STEP
//#define PPC_DEBUG_DISAS
//#define DO_PPC_STATISTICS
//#define OPTIMIZE_FPRF_UPDATE
46 /*****************************************************************************/
47 /* Code translation helpers */
49 /* global register indexes */
50 static TCGv_ptr cpu_env;
51 static char cpu_reg_names[10*3 + 22*4 /* GPR */
52 #if !defined(TARGET_PPC64)
53 + 10*4 + 22*5 /* SPE GPRh */
55 + 10*4 + 22*5 /* FPR */
56 + 2*(10*6 + 22*7) /* AVRh, AVRl */
58 static TCGv cpu_gpr[32];
59 #if !defined(TARGET_PPC64)
60 static TCGv cpu_gprh[32];
62 static TCGv_i64 cpu_fpr[32];
63 static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
64 static TCGv_i32 cpu_crf[8];
70 static TCGv cpu_reserve;
71 static TCGv_i32 cpu_fpscr;
72 static TCGv_i32 cpu_access_type;
74 /* dyngen register indexes */
77 #include "gen-icount.h"
/* Register every CPUState field the translator uses as a TCG global.
   Runs once per process (guarded by done_init; guard body not visible here). */
void ppc_translate_init(void)
    static int done_init = 0;
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    /* Target words don't fit host registers: keep T0..T2 in env slots */
    cpu_T[0] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_T[2] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t2), "T2");
    /* otherwise map T0/T1 onto fixed host registers */
    cpu_T[0] = tcg_global_reg_new(TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_AREG2, "T1");
    /* XXX: This is a temporary workaround for i386.
     * On i386 qemu_st32 runs out of registers.
     * The proper fix is to remove cpu_T.
     */
    cpu_T[2] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t2), "T2");
    cpu_T[2] = tcg_global_reg_new(TCG_AREG3, "T2");
    /* p walks through cpu_reg_names; each name is formatted in place and
       its length (incl. NUL) added to p after registration */
    for (i = 0; i < 8; i++) {
        sprintf(p, "crf%d", i);
        cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                            offsetof(CPUState, crf[i]), p);
    for (i = 0; i < 32; i++) {
        sprintf(p, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUState, gpr[i]), p);
        p += (i < 10) ? 3 : 4;
#if !defined(TARGET_PPC64)
        sprintf(p, "r%dH", i);
        cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
        sprintf(p, "fp%d", i);
        cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fpr[i]), p);
        p += (i < 10) ? 4 : 5;
        sprintf(p, "avr%dH", i);
        /* which u64 half is "high" depends on host endianness */
#ifdef WORDS_BIGENDIAN
        cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUState, avr[i].u64[0]), p);
        cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUState, avr[i].u64[1]), p);
        p += (i < 10) ? 6 : 7;
        sprintf(p, "avr%dL", i);
#ifdef WORDS_BIGENDIAN
        cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUState, avr[i].u64[1]), p);
        cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUState, avr[i].u64[0]), p);
        p += (i < 10) ? 6 : 7;
    cpu_nip = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, nip), "nip");
    cpu_msr = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, msr), "msr");
    cpu_ctr = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, ctr), "ctr");
    cpu_lr = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUState, lr), "lr");
    cpu_xer = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, xer), "xer");
    cpu_reserve = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, reserve), "reserve");
    cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUState, fpscr), "fpscr");
    cpu_access_type = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, access_type), "access_type");

    /* register helpers */
#if defined(OPTIMIZE_FPRF_UPDATE)
/* Records the opcode-buffer positions of FPRF updates so redundant ones can
   later be turned into nops by gen_optimize_fprf() */
static uint16_t *gen_fprf_buf[OPC_BUF_SIZE];
static uint16_t **gen_fprf_ptr;

/* internal defines */
/* Per-translation-block decoder state (fields partly outside this view) */
typedef struct DisasContext {
    struct TranslationBlock *tb;
    /* Routine used to access memory */
    /* Translation flags */
#if !defined(CONFIG_USER_ONLY)
#if defined(TARGET_PPC64)
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;

/* Descriptor for one opcode: decode mask, type flags and generator */
struct opc_handler_t {
    /* instruction type */
    void (*handler)(DisasContext *ctx);
#if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
#if defined(DO_PPC_STATISTICS)
227 static always_inline void gen_reset_fpstatus (void)
229 #ifdef CONFIG_SOFTFLOAT
230 gen_op_reset_fpstatus();
/* Compute the FPSCR FPRF field from arg and optionally copy the fpcc bits
   into CR1 (set_rc), then check for pending FP exceptions. */
static always_inline void gen_compute_fprf (TCGv_i64 arg, int set_fprf, int set_rc)
    TCGv_i32 t0 = tcg_temp_new_i32();

        /* This case might be optimized later */
#if defined(OPTIMIZE_FPRF_UPDATE)
        /* remember this update so a later pass may nop it out */
        *gen_fprf_ptr++ = gen_opc_ptr;
        tcg_gen_movi_i32(t0, 1);
        gen_helper_compute_fprf(t0, arg, t0);
        if (unlikely(set_rc)) {
            tcg_gen_mov_i32(cpu_crf[1], t0);
        gen_helper_float_check_status();
    } else if (unlikely(set_rc)) {
        /* We always need to compute fpcc */
        tcg_gen_movi_i32(t0, 0);
        gen_helper_compute_fprf(t0, arg, t0);
        tcg_gen_mov_i32(cpu_crf[1], t0);
        gen_helper_float_check_status();

    tcg_temp_free_i32(t0);
/* Nop out all but the last recorded FPRF update in the current TB;
   only active when OPTIMIZE_FPRF_UPDATE is enabled. */
static always_inline void gen_optimize_fprf (void)
#if defined(OPTIMIZE_FPRF_UPDATE)
    /* keep the final update (gen_fprf_ptr - 1), kill earlier ones */
    for (ptr = gen_fprf_buf; ptr != (gen_fprf_ptr - 1); ptr++)
        *ptr = INDEX_op_nop1;
    gen_fprf_ptr = gen_fprf_buf;
272 static always_inline void gen_set_access_type(int access_type)
274 tcg_gen_movi_i32(cpu_access_type, access_type);
277 static always_inline void gen_update_nip (DisasContext *ctx, target_ulong nip)
279 #if defined(TARGET_PPC64)
281 tcg_gen_movi_tl(cpu_nip, nip);
284 tcg_gen_movi_tl(cpu_nip, (uint32_t)nip);
/* Raise exception <excp> with error code <error>: synchronize nip first
   (unless an exception is already pending) and mark the TB as finished. */
#define GEN_EXCP(ctx, excp, error)                                           \
TCGv_i32 t0 = tcg_const_i32(excp);                                           \
TCGv_i32 t1 = tcg_const_i32(error);                                          \
if ((ctx)->exception == POWERPC_EXCP_NONE) {                                 \
    gen_update_nip(ctx, (ctx)->nip);                                         \
gen_helper_raise_exception_err(t0, t1);                                      \
tcg_temp_free_i32(t0);                                                       \
tcg_temp_free_i32(t1);                                                       \
ctx->exception = (excp);                                                     \

/* Program exception: invalid/illegal instruction form */
#define GEN_EXCP_INVAL(ctx)                                                  \
GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM,                                        \
         POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_INVAL)

/* Program exception: privileged opcode in user mode */
#define GEN_EXCP_PRIVOPC(ctx)                                                \
GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM,                                        \
         POWERPC_EXCP_INVAL | POWERPC_EXCP_PRIV_OPC)

/* Program exception: privileged SPR access in user mode */
#define GEN_EXCP_PRIVREG(ctx)                                                \
GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM,                                        \
         POWERPC_EXCP_INVAL | POWERPC_EXCP_PRIV_REG)

/* FP unavailable */
#define GEN_EXCP_NO_FP(ctx)                                                  \
GEN_EXCP(ctx, POWERPC_EXCP_FPU, 0)

/* APU unavailable */
#define GEN_EXCP_NO_AP(ctx)                                                  \
GEN_EXCP(ctx, POWERPC_EXCP_APU, 0)

/* Vector unit unavailable */
#define GEN_EXCP_NO_VR(ctx)                                                  \
GEN_EXCP(ctx, POWERPC_EXCP_VPU, 0)
321 /* Stop translation */
322 static always_inline void GEN_STOP (DisasContext *ctx)
324 gen_update_nip(ctx, ctx->nip);
325 ctx->exception = POWERPC_EXCP_STOP;
328 /* No need to update nip here, as execution flow will change */
329 static always_inline void GEN_SYNC (DisasContext *ctx)
331 ctx->exception = POWERPC_EXCP_SYNC;
/* Declare a generator function and its opcode-table entry in one go */
#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                     \
static void gen_##name (DisasContext *ctx);                                  \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type);                             \
static void gen_##name (DisasContext *ctx)

/* Variant with a distinct printable name (onam) for mnemonics that are
   not valid C identifiers, e.g. "addic." */
#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)              \
static void gen_##name (DisasContext *ctx);                                  \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type);                      \
static void gen_##name (DisasContext *ctx)

/* One entry of the linker-section opcode table */
typedef struct opcode_t {
    unsigned char opc1, opc2, opc3;
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[5];
    unsigned char pad[1];
    opc_handler_t handler;
/*****************************************************************************/
/*** Instruction decoding                                                  ***/
/* Define a helper that extracts the unsigned <nb>-bit field at <shift>
   from an opcode word. */
#define EXTRACT_HELPER(name, shift, nb)                                      \
static always_inline uint32_t name (uint32_t opcode)                         \
{                                                                            \
    return (opcode >> (shift)) & ((1 << (nb)) - 1);                          \
}

/* Same, but the field is sign-extended as a 16-bit quantity. */
#define EXTRACT_SHELPER(name, shift, nb)                                     \
static always_inline int32_t name (uint32_t opcode)                          \
{                                                                            \
    return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1));               \
}
/* Primary, secondary and tertiary opcode fields */
EXTRACT_HELPER(opc1, 26, 6);
EXTRACT_HELPER(opc2, 1, 5);
EXTRACT_HELPER(opc3, 6, 5);
/* Update Cr0 flags */
EXTRACT_HELPER(Rc, 0, 1);
/* Destination and source register fields */
EXTRACT_HELPER(rD, 21, 5);
EXTRACT_HELPER(rS, 21, 5);
EXTRACT_HELPER(rA, 16, 5);
EXTRACT_HELPER(rB, 11, 5);
EXTRACT_HELPER(rC, 6, 5);
/* Condition-register field and bit selectors */
EXTRACT_HELPER(crfD, 23, 3);
EXTRACT_HELPER(crfS, 18, 3);
EXTRACT_HELPER(crbD, 21, 5);
EXTRACT_HELPER(crbA, 16, 5);
EXTRACT_HELPER(crbB, 11, 5);
/* Raw SPR field; SPR() below swaps its two 5-bit halves as the ISA requires */
EXTRACT_HELPER(_SPR, 11, 10);
static always_inline uint32_t SPR (uint32_t opcode)
    uint32_t sprn = _SPR(opcode);
    return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
/*** Get constants ***/
EXTRACT_HELPER(IMM, 12, 8);
/* 16 bits signed immediate value */
EXTRACT_SHELPER(SIMM, 0, 16);
/* 16 bits unsigned immediate value */
EXTRACT_HELPER(UIMM, 0, 16);
/* Byte count (lswi), shift, mask-begin/end, trap condition */
EXTRACT_HELPER(NB, 11, 5);
EXTRACT_HELPER(SH, 11, 5);
EXTRACT_HELPER(MB, 6, 5);
EXTRACT_HELPER(ME, 1, 5);
EXTRACT_HELPER(TO, 21, 5);
/* CR mask, FPSCR mask, segment register, FP immediate */
EXTRACT_HELPER(CRM, 12, 8);
EXTRACT_HELPER(FM, 17, 8);
EXTRACT_HELPER(SR, 16, 4);
EXTRACT_HELPER(FPIMM, 12, 4);

/*** Jump target decoding ***/
EXTRACT_SHELPER(d, 0, 16);
/* Immediate address */
static always_inline target_ulong LI (uint32_t opcode)
    return (opcode >> 0) & 0x03FFFFFC;
static always_inline uint32_t BD (uint32_t opcode)
    return (opcode >> 0) & 0xFFFC;
EXTRACT_HELPER(BO, 21, 5);
EXTRACT_HELPER(BI, 16, 5);
/* Absolute/relative address */
EXTRACT_HELPER(AA, 1, 1);
EXTRACT_HELPER(LK, 0, 1);
444 /* Create a mask between <start> and <end> bits */
445 static always_inline target_ulong MASK (uint32_t start, uint32_t end)
449 #if defined(TARGET_PPC64)
450 if (likely(start == 0)) {
451 ret = UINT64_MAX << (63 - end);
452 } else if (likely(end == 63)) {
453 ret = UINT64_MAX >> start;
456 if (likely(start == 0)) {
457 ret = UINT32_MAX << (31 - end);
458 } else if (likely(end == 31)) {
459 ret = UINT32_MAX >> start;
463 ret = (((target_ulong)(-1ULL)) >> (start)) ^
464 (((target_ulong)(-1ULL) >> (end)) >> 1);
465 if (unlikely(start > end))
/*****************************************************************************/
/* PowerPC Instructions types definitions */
    PPC_NONE           = 0x0000000000000000ULL,
    /* PowerPC base instructions set */
    PPC_INSNS_BASE     = 0x0000000000000001ULL,
    /* integer operations instructions */
#define PPC_INTEGER PPC_INSNS_BASE
    /* flow control instructions */
#define PPC_FLOW PPC_INSNS_BASE
    /* virtual memory instructions */
#define PPC_MEM PPC_INSNS_BASE
    /* ld/st with reservation instructions */
#define PPC_RES PPC_INSNS_BASE
    /* spr/msr access instructions */
#define PPC_MISC PPC_INSNS_BASE
    /* Deprecated instruction sets */
    /* Original POWER instruction set */
    PPC_POWER          = 0x0000000000000002ULL,
    /* POWER2 instruction set extension */
    PPC_POWER2         = 0x0000000000000004ULL,
    /* Power RTC support */
    PPC_POWER_RTC      = 0x0000000000000008ULL,
    /* Power-to-PowerPC bridge (601) */
    PPC_POWER_BR       = 0x0000000000000010ULL,
    /* 64 bits PowerPC instruction set */
    PPC_64B            = 0x0000000000000020ULL,
    /* New 64 bits extensions (PowerPC 2.0x) */
    PPC_64BX           = 0x0000000000000040ULL,
    /* 64 bits hypervisor extensions */
    PPC_64H            = 0x0000000000000080ULL,
    /* New wait instruction (PowerPC 2.0x) */
    PPC_WAIT           = 0x0000000000000100ULL,
    /* Time base mftb instruction */
    PPC_MFTB           = 0x0000000000000200ULL,

    /* Fixed-point unit extensions */
    /* PowerPC 602 specific */
    PPC_602_SPEC       = 0x0000000000000400ULL,
    /* isel instruction */
    PPC_ISEL           = 0x0000000000000800ULL,
    /* popcntb instruction */
    PPC_POPCNTB        = 0x0000000000001000ULL,
    /* string load / store */
    PPC_STRING         = 0x0000000000002000ULL,

    /* Floating-point unit extensions */
    /* Optional floating point instructions */
    PPC_FLOAT          = 0x0000000000010000ULL,
    /* New floating-point extensions (PowerPC 2.0x) */
    PPC_FLOAT_EXT      = 0x0000000000020000ULL,
    PPC_FLOAT_FSQRT    = 0x0000000000040000ULL,
    PPC_FLOAT_FRES     = 0x0000000000080000ULL,
    PPC_FLOAT_FRSQRTE  = 0x0000000000100000ULL,
    PPC_FLOAT_FRSQRTES = 0x0000000000200000ULL,
    PPC_FLOAT_FSEL     = 0x0000000000400000ULL,
    PPC_FLOAT_STFIWX   = 0x0000000000800000ULL,

    /* Vector/SIMD extensions */
    /* Altivec support */
    PPC_ALTIVEC        = 0x0000000001000000ULL,
    /* PowerPC 2.03 SPE extension */
    PPC_SPE            = 0x0000000002000000ULL,
    /* PowerPC 2.03 SPE floating-point extension */
    PPC_SPEFPU         = 0x0000000004000000ULL,

    /* Optional memory control instructions */
    PPC_MEM_TLBIA      = 0x0000000010000000ULL,
    PPC_MEM_TLBIE      = 0x0000000020000000ULL,
    PPC_MEM_TLBSYNC    = 0x0000000040000000ULL,
    /* sync instruction */
    PPC_MEM_SYNC       = 0x0000000080000000ULL,
    /* eieio instruction */
    PPC_MEM_EIEIO      = 0x0000000100000000ULL,

    /* Cache control instructions */
    PPC_CACHE          = 0x0000000200000000ULL,
    /* icbi instruction */
    PPC_CACHE_ICBI     = 0x0000000400000000ULL,
    /* dcbz instruction with fixed cache line size */
    PPC_CACHE_DCBZ     = 0x0000000800000000ULL,
    /* dcbz instruction with tunable cache line size */
    PPC_CACHE_DCBZT    = 0x0000001000000000ULL,
    /* dcba instruction */
    PPC_CACHE_DCBA     = 0x0000002000000000ULL,
    /* Freescale cache locking instructions */
    PPC_CACHE_LOCK     = 0x0000004000000000ULL,

    /* MMU related extensions */
    /* external control instructions */
    PPC_EXTERN         = 0x0000010000000000ULL,
    /* segment register access instructions */
    PPC_SEGMENT        = 0x0000020000000000ULL,
    /* PowerPC 6xx TLB management instructions */
    PPC_6xx_TLB        = 0x0000040000000000ULL,
    /* PowerPC 74xx TLB management instructions */
    PPC_74xx_TLB       = 0x0000080000000000ULL,
    /* PowerPC 40x TLB management instructions */
    PPC_40x_TLB        = 0x0000100000000000ULL,
    /* segment register access instructions for PowerPC 64 "bridge" */
    PPC_SEGMENT_64B    = 0x0000200000000000ULL,
    /* SLB management instructions */
    PPC_SLBI           = 0x0000400000000000ULL,

    /* Embedded PowerPC dedicated instructions */
    PPC_WRTEE          = 0x0001000000000000ULL,
    /* PowerPC 40x exception model */
    PPC_40x_EXCP       = 0x0002000000000000ULL,
    /* PowerPC 405 Mac instructions */
    PPC_405_MAC        = 0x0004000000000000ULL,
    /* PowerPC 440 specific instructions */
    PPC_440_SPEC       = 0x0008000000000000ULL,
    /* BookE (embedded) PowerPC specification */
    PPC_BOOKE          = 0x0010000000000000ULL,
    /* mfapidi instruction */
    PPC_MFAPIDI        = 0x0020000000000000ULL,
    /* tlbiva instruction */
    PPC_TLBIVA         = 0x0040000000000000ULL,
    /* tlbivax instruction */
    PPC_TLBIVAX        = 0x0080000000000000ULL,
    /* PowerPC 4xx dedicated instructions */
    PPC_4xx_COMMON     = 0x0100000000000000ULL,
    /* PowerPC 40x ibct instructions */
    PPC_40x_ICBT       = 0x0200000000000000ULL,
    /* rfmci is not implemented in all BookE PowerPC */
    PPC_RFMCI          = 0x0400000000000000ULL,
    /* rfdi instruction */
    PPC_RFDI           = 0x0800000000000000ULL,
    /* Device control registers */
    PPC_DCR            = 0x1000000000000000ULL,
    /* DCR extended access */
    PPC_DCRX           = 0x2000000000000000ULL,
    /* user-mode DCR access, implemented in PowerPC 460 */
    PPC_DCRUX          = 0x4000000000000000ULL,
/*****************************************************************************/
/* PowerPC instructions table */
/* Opcode descriptors are placed in a dedicated linker section so the table
   can be walked at startup; alignment depends on host word size. */
#if HOST_LONG_BITS == 64
#if defined(__APPLE__)
#define OPCODES_SECTION \
    __attribute__ ((section("__TEXT,__opcodes"), unused, aligned (OPC_ALIGN) ))
#define OPCODES_SECTION \
    __attribute__ ((section(".opcodes"), unused, aligned (OPC_ALIGN) ))

#if defined(DO_PPC_STATISTICS)
/* Statistics builds also record the opcode name for profiling output */
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ)                          \
OPCODES_SECTION opcode_t opc_##name = {                                      \
        .handler = &gen_##name,                                              \
        .oname = stringify(name),                                            \
    .oname = stringify(name),                                                \
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ)                   \
OPCODES_SECTION opcode_t opc_##name = {                                      \
        .handler = &gen_##name,                                              \
/* Non-statistics variants */
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ)                          \
OPCODES_SECTION opcode_t opc_##name = {                                      \
        .handler = &gen_##name,                                              \
    .oname = stringify(name),                                                \
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ)                   \
OPCODES_SECTION opcode_t opc_##name = {                                      \
        .handler = &gen_##name,                                              \

/* Sentinel entries delimiting the opcode section */
#define GEN_OPCODE_MARK(name)                                                \
OPCODES_SECTION opcode_t opc_##name = {                                      \
        .inval   = 0x00000000,                                               \
    .oname = stringify(name),                                                \
695 /* SPR load/store helpers */
696 static always_inline void gen_load_spr(TCGv t, int reg)
698 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
701 static always_inline void gen_store_spr(int reg, TCGv t)
703 tcg_gen_st_tl(t, cpu_env, offsetof(CPUState, spr[reg]));
/* Start opcode list */
GEN_OPCODE_MARK(start);

/* Invalid instruction */
GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE)

/* Fallback handler installed for every undecoded opcode slot */
static opc_handler_t invalid_handler = {
    .handler = gen_invalid,

/*** Integer comparison ***/
/* Set CR field crf from comparing arg0 with arg1 (signed when s != 0);
   bit 0 of the field is seeded with XER[SO]. */
static always_inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
    /* start from XER[SO] in the low bit of the CR field */
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_xer);
    tcg_gen_shri_i32(cpu_crf[crf], cpu_crf[crf], XER_SO);
    tcg_gen_andi_i32(cpu_crf[crf], cpu_crf[crf], 1);

    l1 = gen_new_label();
    l2 = gen_new_label();
    l3 = gen_new_label();

    /* signed comparison */
    tcg_gen_brcond_tl(TCG_COND_LT, arg0, arg1, l1);
    tcg_gen_brcond_tl(TCG_COND_GT, arg0, arg1, l2);
    /* unsigned comparison */
    tcg_gen_brcond_tl(TCG_COND_LTU, arg0, arg1, l1);
    tcg_gen_brcond_tl(TCG_COND_GTU, arg0, arg1, l2);
    tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_EQ);
    tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_LT);
    tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_GT);
751 static always_inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
753 TCGv t0 = tcg_const_local_tl(arg1);
754 gen_op_cmp(arg0, t0, s, crf);
#if defined(TARGET_PPC64)
/* 32-bit comparison on a 64-bit target: extend both operands (sign- or
   zero-extend depending on s) into locals, then compare normally. */
static always_inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
    tcg_gen_ext32s_tl(t0, arg0);
    tcg_gen_ext32s_tl(t1, arg1);
    tcg_gen_ext32u_tl(t0, arg0);
    tcg_gen_ext32u_tl(t1, arg1);
    gen_op_cmp(t0, t1, s, crf);

/* Immediate form of the 32-bit comparison above */
static always_inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
    TCGv t0 = tcg_const_local_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
784 static always_inline void gen_set_Rc0 (DisasContext *ctx, TCGv reg)
786 #if defined(TARGET_PPC64)
788 gen_op_cmpi32(reg, 0, 1, 0);
791 gen_op_cmpi(reg, 0, 1, 0);
795 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER)
797 #if defined(TARGET_PPC64)
798 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
799 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
800 1, crfD(ctx->opcode));
803 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
804 1, crfD(ctx->opcode));
808 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
810 #if defined(TARGET_PPC64)
811 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
812 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
813 1, crfD(ctx->opcode));
816 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
817 1, crfD(ctx->opcode));
821 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER)
823 #if defined(TARGET_PPC64)
824 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
825 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
826 0, crfD(ctx->opcode));
829 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
830 0, crfD(ctx->opcode));
834 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
836 #if defined(TARGET_PPC64)
837 if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
838 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
839 0, crfD(ctx->opcode));
842 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
843 0, crfD(ctx->opcode));
846 /* isel (PowerPC 2.03 specification) */
847 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL)
850 uint32_t bi = rC(ctx->opcode);
854 l1 = gen_new_label();
855 l2 = gen_new_label();
857 mask = 1 << (3 - (bi & 0x03));
858 t0 = tcg_temp_new_i32();
859 tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
860 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
861 if (rA(ctx->opcode) == 0)
862 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
864 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
867 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
869 tcg_temp_free_i32(t0);
/*** Integer arithmetic ***/
/* Set XER[OV]/XER[SO] when the signed add/sub of arg1/arg2 into arg0
   overflowed (sub selects subtraction semantics). */
static always_inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, TCGv arg1, TCGv arg2, int sub)
    l1 = gen_new_label();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    t0 = tcg_temp_local_new();
    tcg_gen_xor_tl(t0, arg0, arg1);
#if defined(TARGET_PPC64)
    tcg_gen_ext32s_tl(t0, t0);
    tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
    tcg_gen_xor_tl(t0, arg1, arg2);
#if defined(TARGET_PPC64)
    tcg_gen_ext32s_tl(t0, t0);
    tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
    tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));

/* Set XER[CA] when the add/sub produced a carry/borrow: detected by an
   unsigned comparison of the result against an operand. */
static always_inline void gen_op_arith_compute_ca(DisasContext *ctx, TCGv arg1, TCGv arg2, int sub)
    int l1 = gen_new_label();

#if defined(TARGET_PPC64)
    /* outside 64-bit mode, the carry is computed on the low 32 bits */
    if (!(ctx->sf_mode)) {
        tcg_gen_ext32u_tl(t0, arg1);
        tcg_gen_ext32u_tl(t1, arg2);
        tcg_gen_brcond_tl(TCG_COND_GTU, t0, t1, l1);
        tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
        tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
        tcg_gen_brcond_tl(TCG_COND_GTU, arg1, arg2, l1);
        tcg_gen_brcond_tl(TCG_COND_GEU, arg1, arg2, l1);
        tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);

/* Common add function */
/* ret = arg1 + arg2 (+ XER[CA] when add_ca), optionally updating
   XER[CA] and/or XER[OV]; handles ret aliasing an input via a local. */
static always_inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
                                           int add_ca, int compute_ca, int compute_ov)
    if ((!compute_ca && !compute_ov) ||
        (!TCGV_EQUAL(ret,arg1) && !TCGV_EQUAL(ret, arg2))) {
        t0 = tcg_temp_local_new();
        /* capture the incoming carry before XER is cleared below */
        t1 = tcg_temp_local_new();
        tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
        tcg_gen_shri_tl(t1, t1, XER_CA);
    if (compute_ca && compute_ov) {
        /* Start with XER CA and OV disabled, the most likely case */
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
    } else if (compute_ca) {
        /* Start with XER CA disabled, the most likely case */
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
    } else if (compute_ov) {
        /* Start with XER OV disabled, the most likely case */
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    tcg_gen_add_tl(t0, arg1, arg2);
    gen_op_arith_compute_ca(ctx, t0, arg1, 0);
    /* carry-in path: a second carry check is needed after adding CA */
    tcg_gen_add_tl(t0, t0, t1);
    gen_op_arith_compute_ca(ctx, t0, t1, 0);
    gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, t0);
    if (!TCGV_EQUAL(t0, ret)) {
        tcg_gen_mov_tl(ret, t0);
/* Add functions with two operands */
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov)        \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER)                 \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                     add_ca, compute_ca, compute_ov);                        \
/* Add functions with one operand and one immediate */
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val,                       \
                                add_ca, compute_ca, compute_ov)              \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER)                 \
    TCGv t0 = tcg_const_local_tl(const_val);                                 \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                     cpu_gpr[rA(ctx->opcode)], t0,                           \
                     add_ca, compute_ca, compute_ov);                        \
    tcg_temp_free(t0);                                                       \

/* add  add.  addo  addo. */
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
/* addc  addc.  addco  addco. */
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
/* adde  adde.  addeo  addeo. */
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
/* addme  addme.  addmeo  addmeo. */
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
/* addze  addze.  addzeo  addzeo.*/
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
1028 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1030 target_long simm = SIMM(ctx->opcode);
1032 if (rA(ctx->opcode) == 0) {
1034 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
1036 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm);
/* Shared body for addic/addic.: ret = arg1 + SIMM with XER[CA] update,
   and CR0 update for the record form. */
static always_inline void gen_op_addic (DisasContext *ctx, TCGv ret, TCGv arg1,
    target_long simm = SIMM(ctx->opcode);

    /* Start with XER CA disabled, the most likely case
       (note: only CA is touched here, OV is left alone) */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));

    if (likely(simm != 0)) {
        TCGv t0 = tcg_temp_local_new();
        tcg_gen_addi_tl(t0, arg1, simm);
        gen_op_arith_compute_ca(ctx, t0, arg1, 0);
        tcg_gen_mov_tl(ret, t0);
        /* adding 0 cannot carry: CA stays cleared, just move the value */
        tcg_gen_mov_tl(ret, arg1);
        gen_set_Rc0(ctx, ret);
1061 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1063 gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
1065 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1067 gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
1070 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1072 target_long simm = SIMM(ctx->opcode);
1074 if (rA(ctx->opcode) == 0) {
1076 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
1078 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm << 16);
/* 32-bit divide: handles divide-by-zero and INT32_MIN / -1 by producing
   the architected result instead of trapping, optionally setting XER[OV]. */
static always_inline void gen_op_arith_divw (DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
                                             int sign, int compute_ov)
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    TCGv_i32 t0 = tcg_temp_local_new_i32();
    TCGv_i32 t1 = tcg_temp_local_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    /* divisor == 0 -> overflow path */
    tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1);
        int l3 = gen_new_label();
        tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3);
        /* INT32_MIN / -1 also overflows */
        tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1);
        tcg_gen_div_i32(t0, t0, t1);
        tcg_gen_divu_i32(t0, t0, t1);
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    /* overflow path: architected result is sign-fill (signed) or 0 */
    tcg_gen_sari_i32(t0, t0, 31);
    tcg_gen_movi_i32(t0, 0);
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    tcg_gen_extu_i32_tl(ret, t0);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);

#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                     \
GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)                 \
    gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                         \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],    \
                      sign, compute_ov);                                     \
/* divwu  divwu.  divwuo  divwuo. */
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
/* divw  divw.  divwo  divwo. */
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
#if defined(TARGET_PPC64)
/* 64-bit divide; same special-casing as gen_op_arith_divw but on full
   64-bit operands. */
static always_inline void gen_op_arith_divd (DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
                                             int sign, int compute_ov)
    int l1 = gen_new_label();
    int l2 = gen_new_label();

    /* divisor == 0 -> overflow path */
    tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1);
        int l3 = gen_new_label();
        tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3);
        /* INT64_MIN / -1 also overflows */
        tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1);
        tcg_gen_div_i64(ret, arg1, arg2);
        tcg_gen_divu_i64(ret, arg1, arg2);
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    /* overflow path: sign-fill (signed) or zero result */
    tcg_gen_sari_i64(ret, arg1, 63);
    tcg_gen_movi_i64(ret, 0);
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);

#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                     \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)                     \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                         \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],    \
                      sign, compute_ov);                                     \
/* divdu  divdu.  divduo  divduo. */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd  divd.  divdo  divdo. */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
/* mulhw  mulhw. — signed 32x32 multiply, high word of the product */
GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER)
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
#if defined(TARGET_PPC64)
    tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
    /* 32-bit target: widen to i64, multiply, keep the high half */
    tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_shri_i64(t0, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);

/* mulhwu  mulhwu. */
/* unsigned 32x32 multiply, high word of the product */
GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER)
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
#if defined(TARGET_PPC64)
    tcg_gen_ext32u_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_shri_i64(t0, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1234 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER)
1236 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1237 cpu_gpr[rB(ctx->opcode)]);
1238 tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]);
1239 if (unlikely(Rc(ctx->opcode) != 0))
1240 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* mullwo mullwo. */
/* Like mullw, but also sets XER[OV]/XER[SO] when the signed 32-bit
   product does not fit in 32 bits. */
GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER)
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    l1 = gen_new_label();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
#if defined(TARGET_PPC64)
    tcg_gen_ext32s_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
#if defined(TARGET_PPC64)
    /* overflow iff the full product differs from its 32-bit extension */
    tcg_gen_ext32s_i64(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_brcond_i64(TCG_COND_EQ, t0, cpu_gpr[rD(ctx->opcode)], l1);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_ext32s_i64(t1, t0);
    tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1277 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1279 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1282 #if defined(TARGET_PPC64)
/* 64-bit multiply handlers: delegate to a C helper (gen_helper_<name>) and
 * optionally update CR0 for the dot forms. */
1283 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
1284 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) \
1286 gen_helper_##name (cpu_gpr[rD(ctx->opcode)], \
1287 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
1288 if (unlikely(Rc(ctx->opcode) != 0)) \
1289 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
1292 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00);
1293 /* mulhd mulhd. */
1294 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02);
/* mulld: rD = low 64 bits of rA * rB (plain tcg multiply, no helper) */
1296 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B)
1298 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1299 cpu_gpr[rB(ctx->opcode)]);
1300 if (unlikely(Rc(ctx->opcode) != 0))
1301 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1303 /* mulldo mulldo. */
1304 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17);
1307 /* neg neg. nego nego. */
/* Common negate: ret = -arg1.  The one overflow case is negating the
 * minimum representable integer (INT64_MIN / INT32_MIN), which is checked
 * up front via the branch to l1; the fast path clears XER[OV], the slow
 * path keeps the operand and sets OV|SO.  (ov_check gating and label
 * placement lines are elided from this extract — confirm upstream.) */
1308 static always_inline void gen_op_arith_neg (DisasContext *ctx, TCGv ret, TCGv arg1, int ov_check)
1310 int l1 = gen_new_label();
1311 int l2 = gen_new_label();
1312 TCGv t0 = tcg_temp_local_new();
1313 #if defined(TARGET_PPC64)
1315 tcg_gen_mov_tl(t0, arg1);
1316 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT64_MIN, l1);
/* 32-bit path: compare against INT32_MIN after sign extension */
1320 tcg_gen_ext32s_tl(t0, arg1);
1321 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT32_MIN, l1);
1323 tcg_gen_neg_tl(ret, arg1);
1325 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
/* Overflow path: result is the unchanged operand; OV and sticky SO set */
1329 tcg_gen_mov_tl(ret, t0);
1331 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
1335 if (unlikely(Rc(ctx->opcode) != 0))
1336 gen_set_Rc0(ctx, ret);
/* neg: no overflow reporting (ov_check = 0) */
1338 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER)
1340 gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
/* nego: same with XER[OV]/XER[SO] update (ov_check = 1) */
1342 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER)
1344 gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
1347 /* Common subf function */
/* Computes ret = ~arg1 + arg2 (+ CA) i.e. arg2 - arg1, optionally updating
 * XER[CA] and/or XER[OV].  The result is built in t0, which aliases ret
 * only when that is safe: when CA/OV must be computed, ret must not alias
 * an input or the intermediate carry checks would read a clobbered value.
 * add_ca: add XER[CA] into the sum (subfe/subfme/subfze family).
 * compute_ca / compute_ov: update XER[CA] / XER[OV] respectively. */
1348 static always_inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
1349 int add_ca, int compute_ca, int compute_ov)
1353 if ((!compute_ca && !compute_ov) ||
1354 (!TCGV_EQUAL(ret, arg1) && !TCGV_EQUAL(ret, arg2))) {
1357 t0 = tcg_temp_local_new();
/* Capture the incoming carry bit into t1 before CA is cleared below */
1361 t1 = tcg_temp_local_new();
1362 tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
1363 tcg_gen_shri_tl(t1, t1, XER_CA);
1366 if (compute_ca && compute_ov) {
1367 /* Start with XER CA and OV disabled, the most likely case */
1368 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
1369 } else if (compute_ca) {
1370 /* Start with XER CA disabled, the most likely case */
1371 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1372 } else if (compute_ov) {
1373 /* Start with XER OV disabled, the most likely case */
1374 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
/* Carry-in path: ~arg1 + arg2 + carry, checking CA after each addition
 * (carry can be produced by either step) */
1378 tcg_gen_not_tl(t0, arg1);
1379 tcg_gen_add_tl(t0, t0, arg2);
1380 gen_op_arith_compute_ca(ctx, t0, arg2, 0);
1381 tcg_gen_add_tl(t0, t0, t1);
1382 gen_op_arith_compute_ca(ctx, t0, t1, 0);
/* No carry-in: plain subtraction; last arg 1 = "is a subtract" for CA */
1385 tcg_gen_sub_tl(t0, arg2, arg1);
1387 gen_op_arith_compute_ca(ctx, t0, arg2, 1);
1391 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1394 if (unlikely(Rc(ctx->opcode) != 0))
1395 gen_set_Rc0(ctx, t0);
/* Copy back only when t0 is a temporary distinct from ret */
1397 if (!TCGV_EQUAL(t0, ret)) {
1398 tcg_gen_mov_tl(ret, t0);
1402 /* Sub functions with Two operands functions */
/* Register-register subtract-from variants (rD = rB - rA) */
1403 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
1404 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER) \
1406 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1407 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1408 add_ca, compute_ca, compute_ov); \
1410 /* Sub functions with one operand and one immediate */
1411 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
1412 add_ca, compute_ca, compute_ov) \
1413 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER) \
1415 TCGv t0 = tcg_const_local_tl(const_val); \
1416 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1417 cpu_gpr[rA(ctx->opcode)], t0, \
1418 add_ca, compute_ca, compute_ov); \
1419 tcg_temp_free(t0); \
1421 /* subf subf. subfo subfo. */
1422 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
1423 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
1424 /* subfc subfc. subfco subfco. */
1425 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
1426 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
1427 /* subfe subfe. subfeo subfeo. */
1428 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
1429 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
1430 /* subfme subfme. subfmeo subfmeo. */
1431 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
1432 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
1433 /* subfze subfze. subfzeo subfzeo.*/
1434 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
1435 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
/* subfic: rD = SIMM - rA, always updating XER[CA] (never OV) */
1437 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1439 /* Start with XER CA disabled, the most likely case (subfic never touches OV) */
1440 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1441 TCGv t0 = tcg_temp_local_new();
1442 TCGv t1 = tcg_const_local_tl(SIMM(ctx->opcode));
1443 tcg_gen_sub_tl(t0, t1, cpu_gpr[rA(ctx->opcode)]);
1444 gen_op_arith_compute_ca(ctx, t0, t1, 1);
1446 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
1450 /*** Integer logical ***/
/* Two-operand logicals: rA = op(rS, rB); dot form updates CR0 from rA */
1451 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
1452 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) \
1454 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
1455 cpu_gpr[rB(ctx->opcode)]); \
1456 if (unlikely(Rc(ctx->opcode) != 0)) \
1457 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1460 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
1461 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) \
1463 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
1464 if (unlikely(Rc(ctx->opcode) != 0)) \
1465 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1469 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
1471 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
/* andi. always updates CR0 (no non-dot form exists) */
1473 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1475 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
1476 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* andis.: immediate shifted into the upper halfword; always updates CR0 */
1479 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1481 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
1482 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* cntlzw: count leading zeros of the low 32 bits, via helper */
1485 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER)
1487 gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1488 if (unlikely(Rc(ctx->opcode) != 0))
1489 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1492 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
1493 /* extsb & extsb. */
1494 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
1495 /* extsh & extsh. */
1496 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
1498 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
1500 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
/* or: open-coded (not GEN_LOGICAL2) so that "or rx,rx,rx" — the encoding of
 * the PPC64 process-priority nops — can be special-cased.  Several lines
 * (inner if/else, the switch over rs, #else/#endif) are elided from this
 * extract; verify structure against the full source before editing. */
1502 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER)
1506 rs = rS(ctx->opcode);
1507 ra = rA(ctx->opcode);
1508 rb = rB(ctx->opcode);
1509 /* Optimisation for mr. ri case */
1510 if (rs != ra || rs != rb) {
1512 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
/* rs == rb: plain register move (mr) */
1514 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
1515 if (unlikely(Rc(ctx->opcode) != 0))
1516 gen_set_Rc0(ctx, cpu_gpr[ra]);
1517 } else if (unlikely(Rc(ctx->opcode) != 0)) {
1518 gen_set_Rc0(ctx, cpu_gpr[rs]);
1519 #if defined(TARGET_PPC64)
/* "or rx,rx,rx" encodes a priority hint; the register number selects the
 * priority level.  Supervisor-only levels are gated on ctx->supervisor. */
1525 /* Set process priority to low */
1529 /* Set process priority to medium-low */
1533 /* Set process priority to normal */
1536 #if !defined(CONFIG_USER_ONLY)
1538 if (ctx->supervisor > 0) {
1539 /* Set process priority to very low */
1544 if (ctx->supervisor > 0) {
1545 /* Set process priority to medium-high */
1550 if (ctx->supervisor > 0) {
1551 /* Set process priority to high */
1556 if (ctx->supervisor > 1) {
1557 /* Set process priority to very high */
/* Read-modify-write SPR_PPR: the priority field is replaced by prio.
 * NOTE(review): field placement (mask ~0x001C... / shift 50) taken as-is —
 * confirm against the PPR layout in Book III before changing. */
1567 TCGv t0 = tcg_temp_new();
1568 gen_load_spr(t0, SPR_PPR);
1569 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
1570 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
1571 gen_store_spr(SPR_PPR, t0);
1578 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
/* xor: open-coded to optimise "xor rx,ry,ry" into a load of zero */
1580 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER)
1582 /* Optimisation for "set to zero" case */
1583 if (rS(ctx->opcode) != rB(ctx->opcode))
1584 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1586 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1587 if (unlikely(Rc(ctx->opcode) != 0))
1588 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* ori: "ori r0,r0,0" (uimm==0, rS==rA) is the canonical PPC nop — emit nothing */
1591 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1593 target_ulong uimm = UIMM(ctx->opcode);
1595 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1597 /* XXX: should handle special NOPs for POWER series */
1600 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
/* oris: immediate in the upper halfword; same nop short-circuit */
1603 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1605 target_ulong uimm = UIMM(ctx->opcode);
1607 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1611 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1614 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1616 target_ulong uimm = UIMM(ctx->opcode);
1618 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1622 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1625 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1627 target_ulong uimm = UIMM(ctx->opcode);
1629 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1633 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1635 /* popcntb : PowerPC 2.03 specification */
/* popcntb: per-byte population count, helper chosen by target width */
1636 GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB)
1638 #if defined(TARGET_PPC64)
1640 gen_helper_popcntb_64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1643 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1646 #if defined(TARGET_PPC64)
1647 /* extsw & extsw. */
1648 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
/* cntlzd: count leading zeros of the full 64-bit register, via helper */
1650 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B)
1652 gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1653 if (unlikely(Rc(ctx->opcode) != 0))
1654 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1658 /*** Integer rotate ***/
1659 /* rlwimi & rlwimi. */
/* rlwimi: rotate-left-word-immediate-then-mask-insert.
 * rA = (ROTL32(rS, sh) & MASK(mb, me)) | (rA & ~MASK(mb, me)).
 * On TARGET_PPC64 the mask bounds are adjusted on elided lines
 * (presumably mb += 32 / me += 32 — TODO confirm against full source). */
1660 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1662 uint32_t mb, me, sh;
1664 mb = MB(ctx->opcode);
1665 me = ME(ctx->opcode);
1666 sh = SH(ctx->opcode);
1667 if (likely(sh == 0 && mb == 0 && me == 31)) {
/* Full-mask fast path: whole low word is inserted.
 * NOTE(review): ext32u also zeroes rA's upper 32 bits on 64-bit targets —
 * verify this matches the architected rlwimi result there. */
1668 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1672 TCGv t0 = tcg_temp_new();
1673 #if defined(TARGET_PPC64)
/* 64-bit host value: do the 32-bit rotate in an i32 temp, then widen */
1674 TCGv_i32 t2 = tcg_temp_new_i32();
1675 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
1676 tcg_gen_rotli_i32(t2, t2, sh);
1677 tcg_gen_extu_i32_i64(t0, t2);
1678 tcg_temp_free_i32(t2);
1680 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1682 #if defined(TARGET_PPC64)
/* Merge: keep masked rotate bits, preserve the rest of rA */
1686 mask = MASK(mb, me);
1687 t1 = tcg_temp_new();
1688 tcg_gen_andi_tl(t0, t0, mask);
1689 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1690 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1694 if (unlikely(Rc(ctx->opcode) != 0))
1695 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1697 /* rlwinm & rlwinm. */
/* rlwinm: rotate-left-word-immediate-and-mask; common shift/clear idioms
 * (slwi, srwi, clrlwi) are recognised and emitted without a rotate. */
1698 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1700 uint32_t mb, me, sh;
1702 sh = SH(ctx->opcode);
1703 mb = MB(ctx->opcode);
1704 me = ME(ctx->opcode);
/* mb==0, me==31-sh: this is slwi (shift left + zero-fill) */
1706 if (likely(mb == 0 && me == (31 - sh))) {
1707 if (likely(sh == 0)) {
1708 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1710 TCGv t0 = tcg_temp_new();
1711 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1712 tcg_gen_shli_tl(t0, t0, sh);
1713 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
/* me==31, sh==32-mb: this is srwi (logical shift right) */
1716 } else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
1717 TCGv t0 = tcg_temp_new();
1718 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1719 tcg_gen_shri_tl(t0, t0, mb);
1720 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
/* General case: 32-bit rotate then mask */
1723 TCGv t0 = tcg_temp_new();
1724 #if defined(TARGET_PPC64)
1725 TCGv_i32 t1 = tcg_temp_new_i32();
1726 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1727 tcg_gen_rotli_i32(t1, t1, sh);
1728 tcg_gen_extu_i32_i64(t0, t1);
1729 tcg_temp_free_i32(t1);
1731 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1733 #if defined(TARGET_PPC64)
1737 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1740 if (unlikely(Rc(ctx->opcode) != 0))
1741 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1743 /* rlwnm & rlwnm. */
/* rlwnm: like rlwinm but the rotate count comes from rB (low 5 bits) */
1744 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1748 #if defined(TARGET_PPC64)
1752 mb = MB(ctx->opcode);
1753 me = ME(ctx->opcode);
1754 t0 = tcg_temp_new();
1755 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
1756 #if defined(TARGET_PPC64)
1757 t1 = tcg_temp_new_i32();
1758 t2 = tcg_temp_new_i32();
1759 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1760 tcg_gen_trunc_i64_i32(t2, t0);
1761 tcg_gen_rotl_i32(t1, t1, t2);
1762 tcg_gen_extu_i32_i64(t0, t1);
1763 tcg_temp_free_i32(t1);
1764 tcg_temp_free_i32(t2);
1766 tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
/* Full mask (mb==0, me==31) needs no AND */
1768 if (unlikely(mb != 0 || me != 31)) {
1769 #if defined(TARGET_PPC64)
1773 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1775 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1778 if (unlikely(Rc(ctx->opcode) != 0))
1779 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1782 #if defined(TARGET_PPC64)
/* MD/MDS-form 64-bit rotates split their 6-bit sh/mb fields across the
 * opcode; R2/R4 expand one handler per combination of the split bits. */
1783 #define GEN_PPC64_R2(name, opc1, opc2) \
1784 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
1786 gen_##name(ctx, 0); \
1788 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
1791 gen_##name(ctx, 1); \
1793 #define GEN_PPC64_R4(name, opc1, opc2) \
1794 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
1796 gen_##name(ctx, 0, 0); \
1798 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
1801 gen_##name(ctx, 0, 1); \
1803 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
1806 gen_##name(ctx, 1, 0); \
1808 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
1811 gen_##name(ctx, 1, 1); \
/* Core of rldicl/rldicr/rldic: rotate-left-immediate then AND with
 * MASK(mb, me); pure shifts are recognised and emitted directly. */
1814 static always_inline void gen_rldinm (DisasContext *ctx, uint32_t mb,
1815 uint32_t me, uint32_t sh)
1817 if (likely(sh != 0 && mb == 0 && me == (63 - sh))) {
1818 tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
1819 } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
1820 tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
1822 TCGv t0 = tcg_temp_new();
1823 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1824 if (likely(mb == 0 && me == 63)) {
1825 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1827 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1831 if (unlikely(Rc(ctx->opcode) != 0))
1832 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1834 /* rldicl - rldicl. */
/* shn/mbn are the split high bits of the 6-bit sh/mb fields */
1835 static always_inline void gen_rldicl (DisasContext *ctx, int mbn, int shn)
1839 sh = SH(ctx->opcode) | (shn << 5);
1840 mb = MB(ctx->opcode) | (mbn << 5);
1841 gen_rldinm(ctx, mb, 63, sh);
1843 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1844 /* rldicr - rldicr. */
1845 static always_inline void gen_rldicr (DisasContext *ctx, int men, int shn)
1849 sh = SH(ctx->opcode) | (shn << 5);
/* MB() is intentional here: the MD-form has a single 6-bit mask field, so
 * 'me' of rldicr occupies the same bit positions as 'mb' (cf. rldcr below) */
1850 me = MB(ctx->opcode) | (men << 5);
1851 gen_rldinm(ctx, 0, me, sh);
1853 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1854 /* rldic - rldic. */
1855 static always_inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
1859 sh = SH(ctx->opcode) | (shn << 5);
1860 mb = MB(ctx->opcode) | (mbn << 5);
1861 gen_rldinm(ctx, mb, 63 - sh, sh);
1863 GEN_PPC64_R4(rldic, 0x1E, 0x04);
/* Core of rldcl/rldcr: rotate count from rB (low 6 bits), then mask */
1865 static always_inline void gen_rldnm (DisasContext *ctx, uint32_t mb,
1870 mb = MB(ctx->opcode);
1871 me = ME(ctx->opcode);
1872 t0 = tcg_temp_new();
1873 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1874 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1875 if (unlikely(mb != 0 || me != 63)) {
1876 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1878 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1881 if (unlikely(Rc(ctx->opcode) != 0))
1882 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1885 /* rldcl - rldcl. */
1886 static always_inline void gen_rldcl (DisasContext *ctx, int mbn)
1890 mb = MB(ctx->opcode) | (mbn << 5);
1891 gen_rldnm(ctx, mb, 63);
1893 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1894 /* rldcr - rldcr. */
1895 static always_inline void gen_rldcr (DisasContext *ctx, int men)
1899 me = MB(ctx->opcode) | (men << 5);
1900 gen_rldnm(ctx, 0, me);
1902 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1903 /* rldimi - rldimi. */
/* rldimi: rotate then mask-insert into rA, preserving bits outside the mask.
 * 'me' is assigned on an elided line (presumably 63 - sh — TODO confirm). */
1904 static always_inline void gen_rldimi (DisasContext *ctx, int mbn, int shn)
1906 uint32_t sh, mb, me;
1908 sh = SH(ctx->opcode) | (shn << 5);
1909 mb = MB(ctx->opcode) | (mbn << 5);
1911 if (unlikely(sh == 0 && mb == 0)) {
1912 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1917 t0 = tcg_temp_new();
1918 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1919 t1 = tcg_temp_new();
1920 mask = MASK(mb, me);
1921 tcg_gen_andi_tl(t0, t0, mask);
1922 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1923 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1927 if (unlikely(Rc(ctx->opcode) != 0))
1928 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1930 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1933 /*** Integer shift ***/
/* slw: shift left word.  Shift counts 32..63 (bit 0x20 of rB) architecturally
 * produce 0, hence the 0x3f mask plus the explicit >= 0x20 branch. */
1935 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER)
1939 l1 = gen_new_label();
1940 l2 = gen_new_label();
1942 t0 = tcg_temp_local_new();
1943 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1944 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x20, l1);
1945 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1948 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0);
/* Result is a 32-bit quantity: clear any upper bits on 64-bit targets */
1949 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
1952 if (unlikely(Rc(ctx->opcode) != 0))
1953 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sraw: arithmetic right shift with XER[CA] semantics — done in a helper */
1956 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER)
1958 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)],
1959 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1960 if (unlikely(Rc(ctx->opcode) != 0))
1961 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1963 /* srawi & srawi. */
/* srawi: immediate-count arithmetic right shift.  XER[CA] is set only when
 * the source is negative AND bits are shifted out (inexact result). */
1964 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER)
1966 int sh = SH(ctx->opcode);
1970 l1 = gen_new_label();
1971 l2 = gen_new_label();
1972 t0 = tcg_temp_local_new();
1973 tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1974 tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
1975 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
1976 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
1977 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
1980 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1982 tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1983 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], t0, sh);
/* sh == 0 path (elided branch): pure move, CA cleared */
1986 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1987 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
1989 if (unlikely(Rc(ctx->opcode) != 0))
1990 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* srw: logical right shift word; counts >= 32 yield 0, like slw */
1993 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER)
1997 l1 = gen_new_label();
1998 l2 = gen_new_label();
2000 t0 = tcg_temp_local_new();
2001 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
2002 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x20, l1);
2003 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
/* Zero-extend first so vacated bits are zero even on 64-bit targets */
2006 t1 = tcg_temp_new();
2007 tcg_gen_ext32u_tl(t1, cpu_gpr[rS(ctx->opcode)]);
2008 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t1, t0);
2012 if (unlikely(Rc(ctx->opcode) != 0))
2013 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2015 #if defined(TARGET_PPC64)
/* sld: 64-bit shift left; counts 64..127 (bit 0x40 of rB) yield 0 */
2017 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B)
2021 l1 = gen_new_label();
2022 l2 = gen_new_label();
2024 t0 = tcg_temp_local_new();
2025 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x7f);
2026 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x40, l1);
2027 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2030 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0);
2033 if (unlikely(Rc(ctx->opcode) != 0))
2034 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* srad: 64-bit arithmetic right shift with XER[CA], via helper */
2037 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B)
2039 gen_helper_srad(cpu_gpr[rA(ctx->opcode)],
2040 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2041 if (unlikely(Rc(ctx->opcode) != 0))
2042 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2044 /* sradi & sradi. */
/* sradi: immediate-count 64-bit arithmetic right shift; n supplies the high
 * bit of the split 6-bit shift field.  CA set only for negative + inexact. */
2045 static always_inline void gen_sradi (DisasContext *ctx, int n)
2047 int sh = SH(ctx->opcode) + (n << 5);
2051 l1 = gen_new_label();
2052 l2 = gen_new_label();
2053 t0 = tcg_temp_local_new();
2054 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
2055 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
2056 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2057 tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
2060 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
2063 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
/* sh == 0 path (elided branch): pure move, CA cleared */
2065 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2066 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
2068 if (unlikely(Rc(ctx->opcode) != 0))
2069 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2071 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B)
2075 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B)
/* srd: 64-bit logical right shift; counts >= 64 yield 0 */
2080 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B)
2084 l1 = gen_new_label();
2085 l2 = gen_new_label();
2087 t0 = tcg_temp_local_new();
2088 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x7f);
2089 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x40, l1);
2090 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2093 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0);
2096 if (unlikely(Rc(ctx->opcode) != 0))
2097 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2101 /*** Floating-Point arithmetic ***/
/* FP handler templates.  Each: checks ctx->fpu_enabled (FP-unavailable
 * exception otherwise), resets float status, calls the helper, rounds to
 * single when isfloat, then updates FPRF/CR1 via gen_compute_fprf.
 * ACB = three sources (rA, rC, rB); AB = two (rA, rB); AC = (rA, rC);
 * B = one source; BS = one source with its own opc1. */
2102 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
2103 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type) \
2105 if (unlikely(!ctx->fpu_enabled)) { \
2106 GEN_EXCP_NO_FP(ctx); \
2109 gen_reset_fpstatus(); \
2110 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2111 cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2113 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2115 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], set_fprf, \
2116 Rc(ctx->opcode) != 0); \
2119 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
2120 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
2121 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
/* Double (0x3F) and single (0x3B) forms share one helper; the single form
 * passes isfloat=1 to round the result via frsp. */
2123 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
2124 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type) \
2126 if (unlikely(!ctx->fpu_enabled)) { \
2127 GEN_EXCP_NO_FP(ctx); \
2130 gen_reset_fpstatus(); \
2131 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2132 cpu_fpr[rB(ctx->opcode)]); \
2134 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2136 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2137 set_fprf, Rc(ctx->opcode) != 0); \
2139 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
2140 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2141 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
2143 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
2144 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type) \
2146 if (unlikely(!ctx->fpu_enabled)) { \
2147 GEN_EXCP_NO_FP(ctx); \
2150 gen_reset_fpstatus(); \
2151 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)], \
2152 cpu_fpr[rC(ctx->opcode)]); \
2154 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]); \
2156 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2157 set_fprf, Rc(ctx->opcode) != 0); \
2159 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
2160 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2161 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
/* Single-source FP op (rB only), fixed opc1 0x3F */
2163 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
2164 GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type) \
2166 if (unlikely(!ctx->fpu_enabled)) { \
2167 GEN_EXCP_NO_FP(ctx); \
2170 gen_reset_fpstatus(); \
2171 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2172 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2173 set_fprf, Rc(ctx->opcode) != 0); \
2176 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
2177 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type) \
2179 if (unlikely(!ctx->fpu_enabled)) { \
2180 GEN_EXCP_NO_FP(ctx); \
2183 gen_reset_fpstatus(); \
2184 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2185 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2186 set_fprf, Rc(ctx->opcode) != 0); \
/* fadd/fadds, fdiv/fdivs, fmul/fmuls, plus estimate ops */
2190 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
2192 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
2194 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
2197 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
2200 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
2203 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
/* frsqrtes: open-coded because it reuses the frsqrte helper and then
 * rounds to single with frsp (no dedicated single-precision helper). */
2206 GEN_HANDLER(frsqrtes, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES)
2208 if (unlikely(!ctx->fpu_enabled)) {
2209 GEN_EXCP_NO_FP(ctx);
2212 gen_reset_fpstatus();
2213 gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2214 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2215 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
/* fsel: no single form, no FPRF update (set_fprf = 0) */
2219 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
2221 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
/* fsqrt/fsqrts: open-coded; fsqrts reuses the double helper + frsp */
2224 GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
2226 if (unlikely(!ctx->fpu_enabled)) {
2227 GEN_EXCP_NO_FP(ctx);
2230 gen_reset_fpstatus();
2231 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2232 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2235 GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
2237 if (unlikely(!ctx->fpu_enabled)) {
2238 GEN_EXCP_NO_FP(ctx);
2241 gen_reset_fpstatus();
2242 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2243 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rD(ctx->opcode)]);
2244 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2247 /*** Floating-Point multiply-and-add ***/
2248 /* fmadd - fmadds */
2249 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
2250 /* fmsub - fmsubs */
2251 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
2252 /* fnmadd - fnmadds */
2253 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
2254 /* fnmsub - fnmsubs */
2255 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
2257 /*** Floating-Point round & convert ***/
/* fctiw/fctiwz (to word), frsp, and the 64-bit integer conversions */
2259 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
2261 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
2263 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
2264 #if defined(TARGET_PPC64)
2266 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
2268 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
2270 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
/* Round-to-integer family (frin/friz/frip/frim) */
2274 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
2276 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
2278 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
2280 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
2282 /*** Floating-Point compare ***/
/* fcmpo: ordered compare into crfD; helper also raises VXSNAN/VXVC as
 * appropriate, then float_check_status converts them to exceptions. */
2284 GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT)
2286 if (unlikely(!ctx->fpu_enabled)) {
2287 GEN_EXCP_NO_FP(ctx);
2290 gen_reset_fpstatus();
2291 gen_helper_fcmpo(cpu_crf[crfD(ctx->opcode)],
2292 cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2293 gen_helper_float_check_status();
/* fcmpu: unordered compare — identical shape, different helper */
2297 GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT)
2299 if (unlikely(!ctx->fpu_enabled)) {
2300 GEN_EXCP_NO_FP(ctx);
2303 gen_reset_fpstatus();
2304 gen_helper_fcmpu(cpu_crf[crfD(ctx->opcode)],
2305 cpu_fpr[rA(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2306 gen_helper_float_check_status();
2309 /*** Floating-point move ***/
2311 /* XXX: beware that fabs never checks for NaNs nor update FPSCR */
2312 GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT);
2315 /* XXX: beware that fmr never checks for NaNs nor update FPSCR */
/* fmr: raw 64-bit register copy; only CR1 is updated for the dot form */
2316 GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT)
2318 if (unlikely(!ctx->fpu_enabled)) {
2319 GEN_EXCP_NO_FP(ctx);
2322 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2323 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2327 /* XXX: beware that fnabs never checks for NaNs nor update FPSCR */
2328 GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT);
2330 /* XXX: beware that fneg never checks for NaNs nor update FPSCR */
2331 GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT);
2333 /*** Floating-Point status & ctrl register ***/
/* mcrfs: copy FPSCR field crfS into CR field crfD, then clear the copied
 * (exception) bits in FPSCR.  bfa converts the IBM field index into a
 * shift from the LSB: field 7 is bits 3..0, field 0 is bits 31..28. */
2335 GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT)
2339 if (unlikely(!ctx->fpu_enabled)) {
2340 GEN_EXCP_NO_FP(ctx);
2343 gen_optimize_fprf();
2344 bfa = 4 * (7 - crfS(ctx->opcode));
2345 tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_fpscr, bfa);
2346 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
2347 tcg_gen_andi_i32(cpu_fpscr, cpu_fpscr, ~(0xF << bfa));
/* mffs: move the 32-bit FPSCR, zero-extended, into FPR rD */
2351 GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT)
2353 if (unlikely(!ctx->fpu_enabled)) {
2354 GEN_EXCP_NO_FP(ctx);
2357 gen_optimize_fprf();
2358 gen_reset_fpstatus();
2359 tcg_gen_extu_i32_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
2360 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2364 GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT)
2368 if (unlikely(!ctx->fpu_enabled)) {
2369 GEN_EXCP_NO_FP(ctx);
2372 crb = 32 - (crbD(ctx->opcode) >> 2);
2373 gen_optimize_fprf();
2374 gen_reset_fpstatus();
2375 if (likely(crb != 30 && crb != 29))
2376 tcg_gen_andi_i32(cpu_fpscr, cpu_fpscr, ~(1 << crb));
2377 if (unlikely(Rc(ctx->opcode) != 0)) {
2378 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2383 GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT)
2387 if (unlikely(!ctx->fpu_enabled)) {
2388 GEN_EXCP_NO_FP(ctx);
2391 crb = 32 - (crbD(ctx->opcode) >> 2);
2392 gen_optimize_fprf();
2393 gen_reset_fpstatus();
2394 /* XXX: we pretend we can only do IEEE floating-point computations */
2395 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
2396 TCGv_i32 t0 = tcg_const_i32(crb);
2397 gen_helper_fpscr_setbit(t0);
2398 tcg_temp_free_i32(t0);
2400 if (unlikely(Rc(ctx->opcode) != 0)) {
2401 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2403 /* We can raise a differed exception */
2404 gen_helper_float_check_status();
/* mtfsf: store FPR rB into the FPSCR fields selected by the FM mask;
 * the helper applies the per-field mask and its side effects. */
2408 GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x02010000, PPC_FLOAT)
2412 if (unlikely(!ctx->fpu_enabled)) {
2413 GEN_EXCP_NO_FP(ctx);
2416 gen_optimize_fprf();
2417 gen_reset_fpstatus();
2418 t0 = tcg_const_i32(FM(ctx->opcode));
2419 gen_helper_store_fpscr(cpu_fpr[rB(ctx->opcode)], t0);
2420 tcg_temp_free_i32(t0);
2421 if (unlikely(Rc(ctx->opcode) != 0)) {
2422 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2424 /* We can raise a deferred exception */
2425 gen_helper_float_check_status();
/* mtfsfi: store a 4-bit immediate into one FPSCR field.  The field index
 * 'sh' is derived from bf on an elided line (presumably 7 - bf — TODO
 * confirm against the full source). */
2429 GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT)
2435 if (unlikely(!ctx->fpu_enabled)) {
2436 GEN_EXCP_NO_FP(ctx);
2439 bf = crbD(ctx->opcode) >> 2;
2441 gen_optimize_fprf();
2442 gen_reset_fpstatus();
2443 t0 = tcg_const_i64(FPIMM(ctx->opcode) << (4 * sh));
2444 t1 = tcg_const_i32(1 << sh);
2445 gen_helper_store_fpscr(t0, t1);
2446 tcg_temp_free_i64(t0);
2447 tcg_temp_free_i32(t1);
2448 if (unlikely(Rc(ctx->opcode) != 0)) {
2449 tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
2451 /* We can raise a deferred exception */
2452 gen_helper_float_check_status();
2455 /*** Addressing modes ***/
2456 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
2457 static always_inline void gen_addr_imm_index (TCGv EA,
2461 target_long simm = SIMM(ctx->opcode);
2464 if (rA(ctx->opcode) == 0)
2465 tcg_gen_movi_tl(EA, simm);
2466 else if (likely(simm != 0))
2467 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2469 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
/* Register indirect with register index : EA = (rA|0) + rB */
2472 static always_inline void gen_addr_reg_index (TCGv EA,
2475 if (rA(ctx->opcode) == 0)
2476 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2478 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
/* Register indirect : EA = (rA|0) */
2481 static always_inline void gen_addr_register (TCGv EA,
2484 if (rA(ctx->opcode) == 0)
2485 tcg_gen_movi_tl(EA, 0);
2487 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
/* Raise an alignment exception if any bit of EA selected by mask is set. */
2490 static always_inline void gen_check_align (DisasContext *ctx, TCGv EA, int mask)
2492 int l1 = gen_new_label();
2493 TCGv t0 = tcg_temp_new();
2495 /* NIP cannot be restored if the memory exception comes from a helper */
2496 gen_update_nip(ctx, ctx->nip - 4);
2497 tcg_gen_andi_tl(t0, EA, mask);
2498 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2499 t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
2500 t2 = tcg_const_i32(0);
2501 gen_helper_raise_exception_err(t1, t2);
2502 tcg_temp_free_i32(t1);
2503 tcg_temp_free_i32(t2);
2508 /*** Integer load ***/
2509 #if defined(TARGET_PPC64)
/* 64-bit target: flags bit 1 selects 64-bit addressing; otherwise the
 * address is zero-extended from 32 bits before the access.
 * flags >> 2 carries the mem_idx passed to the TCG qemu_ld op. */
2510 #define GEN_QEMU_LD_PPC64(width) \
2511 static always_inline void gen_qemu_ld##width##_ppc64(TCGv t0, TCGv t1, int flags)\
2513 if (likely(flags & 2)) \
2514 tcg_gen_qemu_ld##width(t0, t1, flags >> 2); \
2516 TCGv addr = tcg_temp_new(); \
2517 tcg_gen_ext32u_tl(addr, t1); \
2518 tcg_gen_qemu_ld##width(t0, addr, flags >> 2); \
2519 tcg_temp_free(addr); \
2522 GEN_QEMU_LD_PPC64(8u)
2523 GEN_QEMU_LD_PPC64(8s)
2524 GEN_QEMU_LD_PPC64(16u)
2525 GEN_QEMU_LD_PPC64(16s)
2526 GEN_QEMU_LD_PPC64(32u)
2527 GEN_QEMU_LD_PPC64(32s)
2528 GEN_QEMU_LD_PPC64(64)
/* Same addressing treatment for stores. */
2530 #define GEN_QEMU_ST_PPC64(width) \
2531 static always_inline void gen_qemu_st##width##_ppc64(TCGv t0, TCGv t1, int flags)\
2533 if (likely(flags & 2)) \
2534 tcg_gen_qemu_st##width(t0, t1, flags >> 2); \
2536 TCGv addr = tcg_temp_new(); \
2537 tcg_gen_ext32u_tl(addr, t1); \
2538 tcg_gen_qemu_st##width(t0, addr, flags >> 2); \
2539 tcg_temp_free(addr); \
2542 GEN_QEMU_ST_PPC64(8)
2543 GEN_QEMU_ST_PPC64(16)
2544 GEN_QEMU_ST_PPC64(32)
2545 GEN_QEMU_ST_PPC64(64)
/* Front-end wrappers: flags bit 0 requests a byte-swapped (little-endian)
 * access; byte-width accesses need no swap. */
2547 static always_inline void gen_qemu_ld8u(TCGv arg0, TCGv arg1, int flags)
2549 gen_qemu_ld8u_ppc64(arg0, arg1, flags);
2552 static always_inline void gen_qemu_ld8s(TCGv arg0, TCGv arg1, int flags)
2554 gen_qemu_ld8s_ppc64(arg0, arg1, flags);
2557 static always_inline void gen_qemu_ld16u(TCGv arg0, TCGv arg1, int flags)
2559 if (unlikely(flags & 1)) {
2561 gen_qemu_ld16u_ppc64(arg0, arg1, flags);
2562 t0 = tcg_temp_new_i32();
2563 tcg_gen_trunc_tl_i32(t0, arg0);
2564 tcg_gen_bswap16_i32(t0, t0);
2565 tcg_gen_extu_i32_tl(arg0, t0);
2566 tcg_temp_free_i32(t0);
2568 gen_qemu_ld16u_ppc64(arg0, arg1, flags);
2571 static always_inline void gen_qemu_ld16s(TCGv arg0, TCGv arg1, int flags)
2573 if (unlikely(flags & 1)) {
/* byte-swapped path: load unsigned, swap, then sign-extend from 16 bits */
2575 gen_qemu_ld16u_ppc64(arg0, arg1, flags);
2576 t0 = tcg_temp_new_i32();
2577 tcg_gen_trunc_tl_i32(t0, arg0);
2578 tcg_gen_bswap16_i32(t0, t0);
2579 tcg_gen_extu_i32_tl(arg0, t0);
2580 tcg_gen_ext16s_tl(arg0, arg0);
2581 tcg_temp_free_i32(t0);
2583 gen_qemu_ld16s_ppc64(arg0, arg1, flags);
2586 static always_inline void gen_qemu_ld32u(TCGv arg0, TCGv arg1, int flags)
2588 if (unlikely(flags & 1)) {
2590 gen_qemu_ld32u_ppc64(arg0, arg1, flags);
2591 t0 = tcg_temp_new_i32();
2592 tcg_gen_trunc_tl_i32(t0, arg0);
2593 tcg_gen_bswap_i32(t0, t0);
2594 tcg_gen_extu_i32_tl(arg0, t0);
2595 tcg_temp_free_i32(t0);
2597 gen_qemu_ld32u_ppc64(arg0, arg1, flags);
2600 static always_inline void gen_qemu_ld32s(TCGv arg0, TCGv arg1, int flags)
2602 if (unlikely(flags & 1)) {
2604 gen_qemu_ld32u_ppc64(arg0, arg1, flags);
2605 t0 = tcg_temp_new_i32();
2606 tcg_gen_trunc_tl_i32(t0, arg0);
2607 tcg_gen_bswap_i32(t0, t0);
2608 tcg_gen_ext_i32_tl(arg0, t0);
2609 tcg_temp_free_i32(t0);
2611 gen_qemu_ld32s_ppc64(arg0, arg1, flags);
2614 static always_inline void gen_qemu_ld64(TCGv arg0, TCGv arg1, int flags)
2616 gen_qemu_ld64_ppc64(arg0, arg1, flags);
2617 if (unlikely(flags & 1))
2618 tcg_gen_bswap_i64(arg0, arg0);
2621 static always_inline void gen_qemu_st8(TCGv arg0, TCGv arg1, int flags)
2623 gen_qemu_st8_ppc64(arg0, arg1, flags);
2626 static always_inline void gen_qemu_st16(TCGv arg0, TCGv arg1, int flags)
2628 if (unlikely(flags & 1)) {
/* swap in a 32-bit temp, widen back to target width before the store */
2631 t0 = tcg_temp_new_i32();
2632 tcg_gen_trunc_tl_i32(t0, arg0);
2633 tcg_gen_ext16u_i32(t0, t0);
2634 tcg_gen_bswap16_i32(t0, t0);
2635 t1 = tcg_temp_new_i64();
2636 tcg_gen_extu_i32_tl(t1, t0);
2637 tcg_temp_free_i32(t0);
2638 gen_qemu_st16_ppc64(t1, arg1, flags);
2639 tcg_temp_free_i64(t1);
2641 gen_qemu_st16_ppc64(arg0, arg1, flags);
2644 static always_inline void gen_qemu_st32(TCGv arg0, TCGv arg1, int flags)
2646 if (unlikely(flags & 1)) {
2649 t0 = tcg_temp_new_i32();
2650 tcg_gen_trunc_tl_i32(t0, arg0);
2651 tcg_gen_bswap_i32(t0, t0);
2652 t1 = tcg_temp_new_i64();
2653 tcg_gen_extu_i32_tl(t1, t0);
2654 tcg_temp_free_i32(t0);
2655 gen_qemu_st32_ppc64(t1, arg1, flags);
2656 tcg_temp_free_i64(t1);
2658 gen_qemu_st32_ppc64(arg0, arg1, flags);
2661 static always_inline void gen_qemu_st64(TCGv arg0, TCGv arg1, int flags)
2663 if (unlikely(flags & 1)) {
2664 TCGv_i64 t0 = tcg_temp_new_i64();
2665 tcg_gen_bswap_i64(t0, arg0);
2666 gen_qemu_st64_ppc64(t0, arg1, flags);
2667 tcg_temp_free_i64(t0);
2669 gen_qemu_st64_ppc64(arg0, arg1, flags);
2673 #else /* defined(TARGET_PPC64) */
/* 32-bit target: no address zero-extension needed; flags >> 1 is the
 * mem_idx, flags bit 0 requests a byte-swapped (little-endian) access. */
2674 #define GEN_QEMU_LD_PPC32(width) \
2675 static always_inline void gen_qemu_ld##width##_ppc32(TCGv arg0, TCGv arg1, int flags) \
2677 tcg_gen_qemu_ld##width(arg0, arg1, flags >> 1); \
2679 GEN_QEMU_LD_PPC32(8u)
2680 GEN_QEMU_LD_PPC32(8s)
2681 GEN_QEMU_LD_PPC32(16u)
2682 GEN_QEMU_LD_PPC32(16s)
2683 GEN_QEMU_LD_PPC32(32u)
2684 GEN_QEMU_LD_PPC32(32s)
2685 static always_inline void gen_qemu_ld64_ppc32(TCGv_i64 arg0, TCGv arg1, int flags)
2687 tcg_gen_qemu_ld64(arg0, arg1, flags >> 1);
2690 #define GEN_QEMU_ST_PPC32(width) \
2691 static always_inline void gen_qemu_st##width##_ppc32(TCGv arg0, TCGv arg1, int flags) \
2693 tcg_gen_qemu_st##width(arg0, arg1, flags >> 1); \
2695 GEN_QEMU_ST_PPC32(8)
2696 GEN_QEMU_ST_PPC32(16)
2697 GEN_QEMU_ST_PPC32(32)
2698 static always_inline void gen_qemu_st64_ppc32(TCGv_i64 arg0, TCGv arg1, int flags)
2700 tcg_gen_qemu_st64(arg0, arg1, flags >> 1);
/* Front-end wrappers, byte-swapping on flags bit 0 as on the 64-bit side. */
2703 static always_inline void gen_qemu_ld8u(TCGv arg0, TCGv arg1, int flags)
2705 gen_qemu_ld8u_ppc32(arg0, arg1, flags >> 1);
2708 static always_inline void gen_qemu_ld8s(TCGv arg0, TCGv arg1, int flags)
2710 gen_qemu_ld8s_ppc32(arg0, arg1, flags >> 1);
2713 static always_inline void gen_qemu_ld16u(TCGv arg0, TCGv arg1, int flags)
2715 gen_qemu_ld16u_ppc32(arg0, arg1, flags >> 1);
2716 if (unlikely(flags & 1))
2717 tcg_gen_bswap16_i32(arg0, arg0);
2720 static always_inline void gen_qemu_ld16s(TCGv arg0, TCGv arg1, int flags)
2722 if (unlikely(flags & 1)) {
2723 gen_qemu_ld16u_ppc32(arg0, arg1, flags);
2724 tcg_gen_bswap16_i32(arg0, arg0);
2725 tcg_gen_ext16s_i32(arg0, arg0);
2727 gen_qemu_ld16s_ppc32(arg0, arg1, flags);
2730 static always_inline void gen_qemu_ld32u(TCGv arg0, TCGv arg1, int flags)
2732 gen_qemu_ld32u_ppc32(arg0, arg1, flags);
2733 if (unlikely(flags & 1))
2734 tcg_gen_bswap_i32(arg0, arg0);
2737 static always_inline void gen_qemu_ld64(TCGv_i64 arg0, TCGv arg1, int flags)
2739 gen_qemu_ld64_ppc32(arg0, arg1, flags);
2740 if (unlikely(flags & 1))
2741 tcg_gen_bswap_i64(arg0, arg0);
2744 static always_inline void gen_qemu_st8(TCGv arg0, TCGv arg1, int flags)
2746 gen_qemu_st8_ppc32(arg0, arg1, flags);
2749 static always_inline void gen_qemu_st16(TCGv arg0, TCGv arg1, int flags)
2751 if (unlikely(flags & 1)) {
2752 TCGv_i32 temp = tcg_temp_new_i32();
2753 tcg_gen_ext16u_i32(temp, arg0);
2754 tcg_gen_bswap16_i32(temp, temp);
2755 gen_qemu_st16_ppc32(temp, arg1, flags);
2756 tcg_temp_free_i32(temp);
2758 gen_qemu_st16_ppc32(arg0, arg1, flags);
2761 static always_inline void gen_qemu_st32(TCGv arg0, TCGv arg1, int flags)
2763 if (unlikely(flags & 1)) {
2764 TCGv_i32 temp = tcg_temp_new_i32();
2765 tcg_gen_bswap_i32(temp, arg0);
2766 gen_qemu_st32_ppc32(temp, arg1, flags);
2767 tcg_temp_free_i32(temp);
2769 gen_qemu_st32_ppc32(arg0, arg1, flags);
2772 static always_inline void gen_qemu_st64(TCGv_i64 arg0, TCGv arg1, int flags)
2774 if (unlikely(flags & 1)) {
2775 TCGv_i64 temp = tcg_temp_new_i64();
2776 tcg_gen_bswap_i64(temp, arg0);
2777 gen_qemu_st64_ppc32(temp, arg1, flags);
2778 tcg_temp_free_i64(temp);
2780 gen_qemu_st64_ppc32(arg0, arg1, flags);
/* Integer load handler templates: GEN_LD (D-form), GEN_LDU (with update,
 * rA must not be 0 or rD), GEN_LDUX (X-form with update), GEN_LDX (X-form).
 * GEN_LDS expands all four variants for one mnemonic. */
2784 #define GEN_LD(name, ldop, opc, type) \
2785 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \
2787 TCGv EA = tcg_temp_new(); \
2788 gen_set_access_type(ACCESS_INT); \
2789 gen_addr_imm_index(EA, ctx, 0); \
2790 gen_qemu_##ldop(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
2791 tcg_temp_free(EA); \
2794 #define GEN_LDU(name, ldop, opc, type) \
2795 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2798 if (unlikely(rA(ctx->opcode) == 0 || \
2799 rA(ctx->opcode) == rD(ctx->opcode))) { \
2800 GEN_EXCP_INVAL(ctx); \
2803 EA = tcg_temp_new(); \
2804 gen_set_access_type(ACCESS_INT); \
2805 if (type == PPC_64B) \
2806 gen_addr_imm_index(EA, ctx, 0x03); \
2808 gen_addr_imm_index(EA, ctx, 0); \
2809 gen_qemu_##ldop(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
2810 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2811 tcg_temp_free(EA); \
2814 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
2815 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type) \
2818 if (unlikely(rA(ctx->opcode) == 0 || \
2819 rA(ctx->opcode) == rD(ctx->opcode))) { \
2820 GEN_EXCP_INVAL(ctx); \
2823 EA = tcg_temp_new(); \
2824 gen_set_access_type(ACCESS_INT); \
2825 gen_addr_reg_index(EA, ctx); \
2826 gen_qemu_##ldop(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
2827 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2828 tcg_temp_free(EA); \
2831 #define GEN_LDX(name, ldop, opc2, opc3, type) \
2832 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \
2834 TCGv EA = tcg_temp_new(); \
2835 gen_set_access_type(ACCESS_INT); \
2836 gen_addr_reg_index(EA, ctx); \
2837 gen_qemu_##ldop(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
2838 tcg_temp_free(EA); \
2841 #define GEN_LDS(name, ldop, op, type) \
2842 GEN_LD(name, ldop, op | 0x20, type); \
2843 GEN_LDU(name, ldop, op | 0x21, type); \
2844 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
2845 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
2847 /* lbz lbzu lbzux lbzx */
2848 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
2849 /* lha lhau lhaux lhax */
2850 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
2851 /* lhz lhzu lhzux lhzx */
2852 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
2853 /* lwz lwzu lwzux lwzx */
2854 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
2855 #if defined(TARGET_PPC64)
2857 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
2859 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
2861 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B);
2863 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B);
/* DS-form ld/ldu/lwa: bits 1:0 of the opcode select the variant. */
2864 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B)
2867 if (Rc(ctx->opcode)) {
2868 if (unlikely(rA(ctx->opcode) == 0 ||
2869 rA(ctx->opcode) == rD(ctx->opcode))) {
2870 GEN_EXCP_INVAL(ctx);
2874 EA = tcg_temp_new();
2875 gen_set_access_type(ACCESS_INT);
2876 gen_addr_imm_index(EA, ctx, 0x03);
2877 if (ctx->opcode & 0x02) {
2878 /* lwa (lwau is undefined) */
2879 gen_qemu_ld32s(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);
2882 gen_qemu_ld64(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);
2884 if (Rc(ctx->opcode))
2885 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
/* lq: quadword load into an even/odd GPR pair; privileged here, and
 * rejected for odd rd, rd == ra, and little-endian mode. */
2889 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX)
2891 #if defined(CONFIG_USER_ONLY)
2892 GEN_EXCP_PRIVOPC(ctx);
2897 /* Restore CPU state */
2898 if (unlikely(ctx->supervisor == 0)) {
2899 GEN_EXCP_PRIVOPC(ctx);
2902 ra = rA(ctx->opcode);
2903 rd = rD(ctx->opcode);
2904 if (unlikely((rd & 1) || rd == ra)) {
2905 GEN_EXCP_INVAL(ctx);
2908 if (unlikely(ctx->mem_idx & 1)) {
2909 /* Little-endian mode is not handled */
2910 GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2913 EA = tcg_temp_new();
2914 gen_set_access_type(ACCESS_INT);
2915 gen_addr_imm_index(EA, ctx, 0x0F);
2916 gen_qemu_ld64(cpu_gpr[rd], EA, ctx->mem_idx);
2917 tcg_gen_addi_tl(EA, EA, 8);
2918 gen_qemu_ld64(cpu_gpr[rd+1], EA, ctx->mem_idx);
2924 /*** Integer store ***/
/* Store templates mirroring the load ones; update forms only require
 * rA != 0 (rS may equal rA for stores). */
2925 #define GEN_ST(name, stop, opc, type) \
2926 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \
2928 TCGv EA = tcg_temp_new(); \
2929 gen_set_access_type(ACCESS_INT); \
2930 gen_addr_imm_index(EA, ctx, 0); \
2931 gen_qemu_##stop(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
2932 tcg_temp_free(EA); \
2935 #define GEN_STU(name, stop, opc, type) \
2936 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2939 if (unlikely(rA(ctx->opcode) == 0)) { \
2940 GEN_EXCP_INVAL(ctx); \
2943 EA = tcg_temp_new(); \
2944 gen_set_access_type(ACCESS_INT); \
2945 if (type == PPC_64B) \
2946 gen_addr_imm_index(EA, ctx, 0x03); \
2948 gen_addr_imm_index(EA, ctx, 0); \
2949 gen_qemu_##stop(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
2950 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2951 tcg_temp_free(EA); \
2954 #define GEN_STUX(name, stop, opc2, opc3, type) \
2955 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type) \
2958 if (unlikely(rA(ctx->opcode) == 0)) { \
2959 GEN_EXCP_INVAL(ctx); \
2962 EA = tcg_temp_new(); \
2963 gen_set_access_type(ACCESS_INT); \
2964 gen_addr_reg_index(EA, ctx); \
2965 gen_qemu_##stop(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
2966 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2967 tcg_temp_free(EA); \
2970 #define GEN_STX(name, stop, opc2, opc3, type) \
2971 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \
2973 TCGv EA = tcg_temp_new(); \
2974 gen_set_access_type(ACCESS_INT); \
2975 gen_addr_reg_index(EA, ctx); \
2976 gen_qemu_##stop(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
2977 tcg_temp_free(EA); \
2980 #define GEN_STS(name, stop, op, type) \
2981 GEN_ST(name, stop, op | 0x20, type); \
2982 GEN_STU(name, stop, op | 0x21, type); \
2983 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
2984 GEN_STX(name, stop, 0x17, op | 0x00, type)
2986 /* stb stbu stbux stbx */
2987 GEN_STS(stb, st8, 0x06, PPC_INTEGER);
2988 /* sth sthu sthux sthx */
2989 GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
2990 /* stw stwu stwux stwx */
2991 GEN_STS(stw, st32, 0x04, PPC_INTEGER);
2992 #if defined(TARGET_PPC64)
2993 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B);
2994 GEN_STX(std, st64, 0x15, 0x04, PPC_64B);
/* DS-form std/stdu/stq: bits 1:0 of the opcode select the variant;
 * the 0x2 case is the privileged quadword store (stq). */
2995 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B)
3000 rs = rS(ctx->opcode);
3001 if ((ctx->opcode & 0x3) == 0x2) {
3002 #if defined(CONFIG_USER_ONLY)
3003 GEN_EXCP_PRIVOPC(ctx);
3006 if (unlikely(ctx->supervisor == 0)) {
3007 GEN_EXCP_PRIVOPC(ctx);
3010 if (unlikely(rs & 1)) {
3011 GEN_EXCP_INVAL(ctx);
3014 if (unlikely(ctx->mem_idx & 1)) {
3015 /* Little-endian mode is not handled */
3016 GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
3019 EA = tcg_temp_new();
3020 gen_set_access_type(ACCESS_INT);
3021 gen_addr_imm_index(EA, ctx, 0x03);
3022 gen_qemu_st64(cpu_gpr[rs], EA, ctx->mem_idx);
3023 tcg_gen_addi_tl(EA, EA, 8);
3024 gen_qemu_st64(cpu_gpr[rs+1], EA, ctx->mem_idx);
3029 if (Rc(ctx->opcode)) {
3030 if (unlikely(rA(ctx->opcode) == 0)) {
3031 GEN_EXCP_INVAL(ctx);
3035 EA = tcg_temp_new();
3036 gen_set_access_type(ACCESS_INT);
3037 gen_addr_imm_index(EA, ctx, 0x03);
3038 gen_qemu_st64(cpu_gpr[rs], EA, ctx->mem_idx);
3039 if (Rc(ctx->opcode))
3040 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
3045 /*** Integer load and store with byte reverse ***/
/* lhbrx: 16-bit load, then byte-swap the low half in a 32-bit temp. */
3047 void always_inline gen_qemu_ld16ur(TCGv t0, TCGv t1, int flags)
3049 TCGv_i32 temp = tcg_temp_new_i32();
3050 gen_qemu_ld16u(t0, t1, flags);
3051 tcg_gen_trunc_tl_i32(temp, t0);
3052 tcg_gen_bswap16_i32(temp, temp);
3053 tcg_gen_extu_i32_tl(t0, temp);
3054 tcg_temp_free_i32(temp);
3056 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
/* lwbrx: 32-bit load, then full byte-swap. */
3059 void always_inline gen_qemu_ld32ur(TCGv t0, TCGv t1, int flags)
3061 TCGv_i32 temp = tcg_temp_new_i32();
3062 gen_qemu_ld32u(t0, t1, flags);
3063 tcg_gen_trunc_tl_i32(temp, t0);
3064 tcg_gen_bswap_i32(temp, temp);
3065 tcg_gen_extu_i32_tl(t0, temp);
3066 tcg_temp_free_i32(temp);
3068 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
/* sthbrx: byte-swap the low 16 bits, then store. */
3071 void always_inline gen_qemu_st16r(TCGv t0, TCGv t1, int flags)
3073 TCGv_i32 temp = tcg_temp_new_i32();
3074 TCGv t2 = tcg_temp_new();
3075 tcg_gen_trunc_tl_i32(temp, t0);
3076 tcg_gen_ext16u_i32(temp, temp);
3077 tcg_gen_bswap16_i32(temp, temp);
3078 tcg_gen_extu_i32_tl(t2, temp);
3079 tcg_temp_free_i32(temp);
3080 gen_qemu_st16(t2, t1, flags);
3083 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
/* stwbrx: byte-swap 32 bits, then store. */
3086 void always_inline gen_qemu_st32r(TCGv t0, TCGv t1, int flags)
3088 TCGv_i32 temp = tcg_temp_new_i32();
3089 TCGv t2 = tcg_temp_new();
3090 tcg_gen_trunc_tl_i32(temp, t0);
3091 tcg_gen_bswap_i32(temp, temp);
3092 tcg_gen_extu_i32_tl(t2, temp);
3093 tcg_temp_free_i32(temp);
3094 gen_qemu_st32(t2, t1, flags);
3097 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3099 /*** Integer load and store multiple ***/
/* lmw: load GPRs rD..31 from consecutive words, done in a helper. */
3101 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
3103 TCGv t0 = tcg_temp_new();
3104 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
3105 /* NIP cannot be restored if the memory exception comes from a helper */
3106 gen_update_nip(ctx, ctx->nip - 4);
3107 gen_addr_imm_index(t0, ctx, 0);
3108 gen_helper_lmw(t0, t1);
3110 tcg_temp_free_i32(t1);
/* stmw: store GPRs rS..31 to consecutive words, done in a helper. */
3114 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
3116 TCGv t0 = tcg_temp_new();
3117 TCGv_i32 t1 = tcg_const_i32(rS(ctx->opcode));
3118 /* NIP cannot be restored if the memory exception comes from a helper */
3119 gen_update_nip(ctx, ctx->nip - 4);
3120 gen_addr_imm_index(t0, ctx, 0);
3121 gen_helper_stmw(t0, t1);
3123 tcg_temp_free_i32(t1);
3126 /*** Integer load and store strings ***/
3128 /* PowerPC32 specification says we must generate an exception if
3129 * rA is in the range of registers to be loaded.
3130 * On the other hand, IBM says this is valid, but rA won't be loaded.
3131 * For now, I'll follow the spec...
/* lswi: load string word immediate; nb bytes into registers from `start`,
 * wrapping from r31 to r0. The check below rejects loads that would
 * overwrite rA, per the spec quoted above. */
3133 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING)
3137 int nb = NB(ctx->opcode);
3138 int start = rD(ctx->opcode);
3139 int ra = rA(ctx->opcode);
3145 if (unlikely(((start + nr) > 32 &&
3146 start <= ra && (start + nr - 32) > ra) ||
3147 ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
3148 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
3149 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_LSWX);
3152 /* NIP cannot be restored if the memory exception comes from a helper */
3153 gen_update_nip(ctx, ctx->nip - 4);
3154 t0 = tcg_temp_new();
3155 gen_addr_register(t0, ctx);
3156 t1 = tcg_const_i32(nb);
3157 t2 = tcg_const_i32(start);
3158 gen_helper_lsw(t0, t1, t2);
3160 tcg_temp_free_i32(t1);
3161 tcg_temp_free_i32(t2);
/* lswx: load string word indexed; byte count comes from XER, so the
 * rA-overlap check is done inside the helper. */
3165 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING)
3167 TCGv t0 = tcg_temp_new();
3168 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
3169 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
3170 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
3171 /* NIP cannot be restored if the memory exception comes from a helper */
3172 gen_update_nip(ctx, ctx->nip - 4);
3173 gen_addr_reg_index(t0, ctx);
3174 gen_helper_lswx(t0, t1, t2, t3);
3176 tcg_temp_free_i32(t1);
3177 tcg_temp_free_i32(t2);
3178 tcg_temp_free_i32(t3);
/* stswi: store string word immediate. */
3182 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING)
3184 int nb = NB(ctx->opcode);
3185 TCGv t0 = tcg_temp_new();
3187 TCGv_i32 t2 = tcg_const_i32(rS(ctx->opcode));
3188 /* NIP cannot be restored if the memory exception comes from a helper */
3189 gen_update_nip(ctx, ctx->nip - 4);
3190 gen_addr_register(t0, ctx);
3193 t1 = tcg_const_i32(nb);
3194 gen_helper_stsw(t0, t1, t2);
3196 tcg_temp_free_i32(t1);
3197 tcg_temp_free_i32(t2);
/* stswx: store string word indexed; byte count is XER[6:0]. */
3201 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING)
3203 TCGv t0 = tcg_temp_new();
3204 TCGv_i32 t1 = tcg_temp_new_i32();
3205 TCGv_i32 t2 = tcg_const_i32(rS(ctx->opcode));
3206 /* NIP cannot be restored if the memory exception comes from a helper */
3207 gen_update_nip(ctx, ctx->nip - 4);
3208 gen_addr_reg_index(t0, ctx);
3209 tcg_gen_trunc_tl_i32(t1, cpu_xer);
3210 tcg_gen_andi_i32(t1, t1, 0x7F);
3211 gen_helper_stsw(t0, t1, t2);
3213 tcg_temp_free_i32(t1);
3214 tcg_temp_free_i32(t2);
3217 /*** Memory synchronisation ***/
3219 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO)
3224 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM)
/* lwarx: word load that also records the reservation address in
 * cpu_reserve for the matching stwcx. below. */
3230 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000001, PPC_RES)
3232 TCGv t0 = tcg_temp_local_new();
3233 gen_set_access_type(ACCESS_RES);
3234 gen_addr_reg_index(t0, ctx);
3235 gen_check_align(ctx, t0, 0x03);
3236 #if defined(TARGET_PPC64)
3238 tcg_gen_ext32u_tl(t0, t0);
3240 gen_qemu_ld32u(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx);
3241 tcg_gen_mov_tl(cpu_reserve, t0);
/* stwcx.: store conditional; CR0 gets SO plus EQ iff the address still
 * matches cpu_reserve, in which case the store is performed. The
 * reservation is cleared (-1) afterwards. */
3246 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES)
3248 int l1 = gen_new_label();
3249 TCGv t0 = tcg_temp_local_new();
3250 gen_set_access_type(ACCESS_RES);
3251 gen_addr_reg_index(t0, ctx);
3252 gen_check_align(ctx, t0, 0x03);
3253 #if defined(TARGET_PPC64)
3255 tcg_gen_ext32u_tl(t0, t0);
3257 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
3258 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
3259 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
3260 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3261 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3262 gen_qemu_st32(cpu_gpr[rS(ctx->opcode)], t0, ctx->mem_idx);
3264 tcg_gen_movi_tl(cpu_reserve, -1);
3268 #if defined(TARGET_PPC64)
/* ldarx/stdcx.: 64-bit doubleword variants of the pair above. */
3270 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000001, PPC_64B)
3272 TCGv t0 = tcg_temp_local_new();
3273 gen_set_access_type(ACCESS_RES);
3274 gen_addr_reg_index(t0, ctx);
3275 gen_check_align(ctx, t0, 0x07);
3277 tcg_gen_ext32u_tl(t0, t0);
3278 gen_qemu_ld64(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx);
3279 tcg_gen_mov_tl(cpu_reserve, t0);
3284 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B)
3286 int l1 = gen_new_label();
3287 TCGv t0 = tcg_temp_local_new();
3288 gen_set_access_type(ACCESS_RES);
3289 gen_addr_reg_index(t0, ctx);
3290 gen_check_align(ctx, t0, 0x07);
3292 tcg_gen_ext32u_tl(t0, t0);
3293 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_xer);
3294 tcg_gen_shri_i32(cpu_crf[0], cpu_crf[0], XER_SO);
3295 tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 1);
3296 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3297 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3298 gen_qemu_st64(cpu_gpr[rS(ctx->opcode)], t0, ctx->mem_idx);
3300 tcg_gen_movi_tl(cpu_reserve, -1);
3303 #endif /* defined(TARGET_PPC64) */
3306 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC)
3311 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT)
/* NOTE(review): t0 is stored into 'halted' without any visible
 * initialization here — presumably a constant is set on a line not
 * shown; confirm against the full source. */
3313 TCGv_i32 t0 = tcg_temp_new_i32();
3314 tcg_gen_st_i32(t0, cpu_env, offsetof(CPUState, halted));
3315 tcg_temp_free_i32(t0);
3316 /* Stop translation, as the CPU is supposed to sleep from now */
3317 GEN_EXCP(ctx, EXCP_HLT, 1);
3320 /*** Floating-point load ***/
/* FP load templates: same four shapes as the integer ones, but the
 * destination is an FPR and ACCESS_FLOAT is used. All variants bail out
 * with a no-FP exception when the FPU is disabled. */
3321 #define GEN_LDF(name, ldop, opc, type) \
3322 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \
3325 if (unlikely(!ctx->fpu_enabled)) { \
3326 GEN_EXCP_NO_FP(ctx); \
3329 gen_set_access_type(ACCESS_FLOAT); \
3330 EA = tcg_temp_new(); \
3331 gen_addr_imm_index(EA, ctx, 0); \
3332 gen_qemu_##ldop(cpu_fpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
3333 tcg_temp_free(EA); \
3336 #define GEN_LDUF(name, ldop, opc, type) \
3337 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type) \
3340 if (unlikely(!ctx->fpu_enabled)) { \
3341 GEN_EXCP_NO_FP(ctx); \
3344 if (unlikely(rA(ctx->opcode) == 0)) { \
3345 GEN_EXCP_INVAL(ctx); \
3348 gen_set_access_type(ACCESS_FLOAT); \
3349 EA = tcg_temp_new(); \
3350 gen_addr_imm_index(EA, ctx, 0); \
3351 gen_qemu_##ldop(cpu_fpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
3352 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3353 tcg_temp_free(EA); \
3356 #define GEN_LDUXF(name, ldop, opc, type) \
3357 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type) \
3360 if (unlikely(!ctx->fpu_enabled)) { \
3361 GEN_EXCP_NO_FP(ctx); \
3364 if (unlikely(rA(ctx->opcode) == 0)) { \
3365 GEN_EXCP_INVAL(ctx); \
3368 gen_set_access_type(ACCESS_FLOAT); \
3369 EA = tcg_temp_new(); \
3370 gen_addr_reg_index(EA, ctx); \
3371 gen_qemu_##ldop(cpu_fpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
3372 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3373 tcg_temp_free(EA); \
3376 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
3377 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \
3380 if (unlikely(!ctx->fpu_enabled)) { \
3381 GEN_EXCP_NO_FP(ctx); \
3384 gen_set_access_type(ACCESS_FLOAT); \
3385 EA = tcg_temp_new(); \
3386 gen_addr_reg_index(EA, ctx); \
3387 gen_qemu_##ldop(cpu_fpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
3388 tcg_temp_free(EA); \
3391 #define GEN_LDFS(name, ldop, op, type) \
3392 GEN_LDF(name, ldop, op | 0x20, type); \
3393 GEN_LDUF(name, ldop, op | 0x21, type); \
3394 GEN_LDUXF(name, ldop, op | 0x01, type); \
3395 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
/* lfs: load 32-bit float and widen to the 64-bit FPR format via helper. */
3397 static always_inline void gen_qemu_ld32fs(TCGv_i64 arg1, TCGv arg2, int flags)
3399 TCGv t0 = tcg_temp_new();
3400 TCGv_i32 t1 = tcg_temp_new_i32();
3401 gen_qemu_ld32u(t0, arg2, flags);
3402 tcg_gen_trunc_tl_i32(t1, t0);
3404 gen_helper_float32_to_float64(arg1, t1);
3405 tcg_temp_free_i32(t1);
3408 /* lfd lfdu lfdux lfdx */
3409 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT);
3410 /* lfs lfsu lfsux lfsx */
3411 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);
3413 /*** Floating-point store ***/
/* FP store templates mirroring the FP load ones. */
3414 #define GEN_STF(name, stop, opc, type) \
3415 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type) \
3418 if (unlikely(!ctx->fpu_enabled)) { \
3419 GEN_EXCP_NO_FP(ctx); \
3422 gen_set_access_type(ACCESS_FLOAT); \
3423 EA = tcg_temp_new(); \
3424 gen_addr_imm_index(EA, ctx, 0); \
3425 gen_qemu_##stop(cpu_fpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
3426 tcg_temp_free(EA); \
3429 #define GEN_STUF(name, stop, opc, type) \
3430 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type) \
3433 if (unlikely(!ctx->fpu_enabled)) { \
3434 GEN_EXCP_NO_FP(ctx); \
3437 if (unlikely(rA(ctx->opcode) == 0)) { \
3438 GEN_EXCP_INVAL(ctx); \
3441 gen_set_access_type(ACCESS_FLOAT); \
3442 EA = tcg_temp_new(); \
3443 gen_addr_imm_index(EA, ctx, 0); \
3444 gen_qemu_##stop(cpu_fpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
3445 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3446 tcg_temp_free(EA); \
3449 #define GEN_STUXF(name, stop, opc, type) \
3450 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type) \
3453 if (unlikely(!ctx->fpu_enabled)) { \
3454 GEN_EXCP_NO_FP(ctx); \
3457 if (unlikely(rA(ctx->opcode) == 0)) { \
3458 GEN_EXCP_INVAL(ctx); \
3461 gen_set_access_type(ACCESS_FLOAT); \
3462 EA = tcg_temp_new(); \
3463 gen_addr_reg_index(EA, ctx); \
3464 gen_qemu_##stop(cpu_fpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
3465 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3466 tcg_temp_free(EA); \
3469 #define GEN_STXF(name, stop, opc2, opc3, type) \
3470 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type) \
3473 if (unlikely(!ctx->fpu_enabled)) { \
3474 GEN_EXCP_NO_FP(ctx); \
3477 gen_set_access_type(ACCESS_FLOAT); \
3478 EA = tcg_temp_new(); \
3479 gen_addr_reg_index(EA, ctx); \
3480 gen_qemu_##stop(cpu_fpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
3481 tcg_temp_free(EA); \
3484 #define GEN_STFS(name, stop, op, type) \
3485 GEN_STF(name, stop, op | 0x20, type); \
3486 GEN_STUF(name, stop, op | 0x21, type); \
3487 GEN_STUXF(name, stop, op | 0x01, type); \
3488 GEN_STXF(name, stop, 0x17, op | 0x00, type)
/* stfs: narrow the 64-bit FPR to 32-bit float via helper, then store. */
3490 static always_inline void gen_qemu_st32fs(TCGv_i64 arg1, TCGv arg2, int flags)
3492 TCGv_i32 t0 = tcg_temp_new_i32();
3493 TCGv t1 = tcg_temp_new();
3494 gen_helper_float64_to_float32(t0, arg1);
3495 tcg_gen_extu_i32_tl(t1, t0);
3496 tcg_temp_free_i32(t0);
3497 gen_qemu_st32(t1, arg2, flags);
3501 /* stfd stfdu stfdux stfdx */
3502 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT);
3503 /* stfs stfsu stfsux stfsx */
3504 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);
/* stfiwx: store the low 32 bits of the FPR as an integer word. */
3507 static always_inline void gen_qemu_st32fiw(TCGv_i64 arg1, TCGv arg2, int flags)
3509 TCGv t0 = tcg_temp_new();
/* NOTE(review): comma operator here — valid C, but a ';' would be clearer */
3510 tcg_gen_trunc_i64_tl(t0, arg1),
3511 gen_qemu_st32(t0, arg2, flags);
3515 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
/* Emit a (possibly chained) jump to dest: direct TB linking when the
 * destination is on the same page and we are not single-stepping,
 * otherwise set NIP and fall out, raising trace/debug exceptions as
 * required by the single-step flags. */
3518 static always_inline void gen_goto_tb (DisasContext *ctx, int n,
3521 TranslationBlock *tb;
3523 #if defined(TARGET_PPC64)
3525 dest = (uint32_t) dest;
3527 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
3528 likely(!ctx->singlestep_enabled)) {
3530 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3531 tcg_gen_exit_tb((long)tb + n);
3533 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3534 if (unlikely(ctx->singlestep_enabled)) {
3535 if ((ctx->singlestep_enabled &
3536 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
3537 ctx->exception == POWERPC_EXCP_BRANCH) {
3538 target_ulong tmp = ctx->nip;
3540 GEN_EXCP(ctx, POWERPC_EXCP_TRACE, 0);
3543 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3544 gen_update_nip(ctx, dest);
3545 gen_helper_raise_debug();
/* Set LR to nip, truncated to 32 bits when not in 64-bit (sf) mode. */
3552 static always_inline void gen_setlr (DisasContext *ctx, target_ulong nip)
3554 #if defined(TARGET_PPC64)
3555 if (ctx->sf_mode == 0)
3556 tcg_gen_movi_tl(cpu_lr, (uint32_t)nip);
3559 tcg_gen_movi_tl(cpu_lr, nip);
/* b/ba/bl/bla: unconditional branch, LI sign-extended; AA selects
 * absolute vs relative, LK updates LR. */
3563 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3565 target_ulong li, target;
3567 ctx->exception = POWERPC_EXCP_BRANCH;
3568 /* sign extend LI */
3569 #if defined(TARGET_PPC64)
3571 li = ((int64_t)LI(ctx->opcode) << 38) >> 38;
3574 li = ((int32_t)LI(ctx->opcode) << 6) >> 6;
3575 if (likely(AA(ctx->opcode) == 0))
3576 target = ctx->nip + li - 4;
3579 if (LK(ctx->opcode))
3580 gen_setlr(ctx, ctx->nip);
3581 gen_goto_tb(ctx, 0, target);
/* Conditional branch core shared by bc, bclr and bcctr.
 * BO bit 0x4 clear: decrement CTR and test it; BO bit 0x10 clear: also
 * test the CR bit selected by BI. l1 is the fall-through label. */
3588 static always_inline void gen_bcond (DisasContext *ctx, int type)
3590 uint32_t bo = BO(ctx->opcode);
3591 int l1 = gen_new_label();
3594 ctx->exception = POWERPC_EXCP_BRANCH;
3595 if (type == BCOND_LR || type == BCOND_CTR) {
3596 target = tcg_temp_local_new();
3597 if (type == BCOND_CTR)
3598 tcg_gen_mov_tl(target, cpu_ctr);
3600 tcg_gen_mov_tl(target, cpu_lr);
3602 if (LK(ctx->opcode))
3603 gen_setlr(ctx, ctx->nip);
3604 l1 = gen_new_label();
3605 if ((bo & 0x4) == 0) {
3606 /* Decrement and test CTR */
3607 TCGv temp = tcg_temp_new();
3608 if (unlikely(type == BCOND_CTR)) {
/* bcctr with CTR-decrement is an invalid form */
3609 GEN_EXCP_INVAL(ctx);
3612 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3613 #if defined(TARGET_PPC64)
3615 tcg_gen_ext32u_tl(temp, cpu_ctr);
3618 tcg_gen_mov_tl(temp, cpu_ctr);
3620 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3622 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3624 tcg_temp_free(temp);
3626 if ((bo & 0x10) == 0) {
3628 uint32_t bi = BI(ctx->opcode);
3629 uint32_t mask = 1 << (3 - (bi & 0x03));
3630 TCGv_i32 temp = tcg_temp_new_i32();
3633 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3634 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3636 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3637 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3639 tcg_temp_free_i32(temp);
3641 if (type == BCOND_IM) {
3642 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3643 if (likely(AA(ctx->opcode) == 0)) {
3644 gen_goto_tb(ctx, 0, ctx->nip + li - 4);
3646 gen_goto_tb(ctx, 0, li);
3649 gen_goto_tb(ctx, 1, ctx->nip);
3651 #if defined(TARGET_PPC64)
3652 if (!(ctx->sf_mode))
/* 32-bit mode: mask NIP to 32 bits and clear the low address bits */
3653 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3656 tcg_gen_andi_tl(cpu_nip, target, ~3);
3659 #if defined(TARGET_PPC64)
3660 if (!(ctx->sf_mode))
3661 tcg_gen_movi_tl(cpu_nip, (uint32_t)ctx->nip);
3664 tcg_gen_movi_tl(cpu_nip, ctx->nip);
/* bc / bcctr / bclr: thin wrappers selecting the branch-target kind. */
3669 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3671 gen_bcond(ctx, BCOND_IM);
3674 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW)
3676 gen_bcond(ctx, BCOND_CTR);
3679 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW)
3681 gen_bcond(ctx, BCOND_LR);
3684 /*** Condition register logical ***/
/* GEN_CRLOGIC: template for the CR-bit logical ops, crbD <- crbA <op> crbB.
 * Each CR field is a separate 32-bit TCG global (cpu_crf[0..7], low 4 bits
 * used).  The source bit is shifted so it lines up with the destination bit
 * position (shift right, left, or not at all depending on the sign of the
 * field-offset difference), the operation is applied, and the single result
 * bit is merged back into the destination CR field under `bitmask`.
 * NOTE(review): the declarations of sh/t0/t1/bitmask and the if/else lines
 * selecting between the shri/shli/mov alternatives are missing from this
 * extract — do not assume the three shift lines execute sequentially. */
3685 #define GEN_CRLOGIC(name, tcg_op, opc) \
3686 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) \
3691 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3692 t0 = tcg_temp_new_i32(); \
3694 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
3696 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
3698 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
3699 t1 = tcg_temp_new_i32(); \
3700 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3702 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
3704 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
3706 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
3707 tcg_op(t0, t0, t1); \
3708 bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3709 tcg_gen_andi_i32(t0, t0, bitmask); \
3710 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3711 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
3712 tcg_temp_free_i32(t0); \
3713 tcg_temp_free_i32(t1); \
/* The eight CR logical instructions, all generated from the template;
 * the third argument is the extended-opcode discriminator. */
3717 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3719 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3721 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3723 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3725 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3727 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3729 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3731 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
/* mcrf: copy condition-register field crfS into crfD (whole 4-bit field). */
3733 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER)
3735 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3738 /*** System linkage ***/
3739 /* rfi (supervisor only) */
/* rfi (supervisor only): return from interrupt.  In user-mode emulation
 * builds the instruction always raises a privileged-opcode exception;
 * in full-system builds it does so only when not in supervisor state.
 * NOTE(review): the #else/#endif lines and the actual state-restore code
 * are missing from this extract. */
3740 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW)
3742 #if defined(CONFIG_USER_ONLY)
3743 GEN_EXCP_PRIVOPC(ctx);
3745 /* Restore CPU state */
3746 if (unlikely(!ctx->supervisor)) {
3747 GEN_EXCP_PRIVOPC(ctx);
3755 #if defined(TARGET_PPC64)
/* rfid: 64-bit return from interrupt; same privilege gating as rfi. */
3756 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B)
3758 #if defined(CONFIG_USER_ONLY)
3759 GEN_EXCP_PRIVOPC(ctx);
3761 /* Restore CPU state */
3762 if (unlikely(!ctx->supervisor)) {
3763 GEN_EXCP_PRIVOPC(ctx);
/* hrfid: hypervisor return from interrupt — requires hypervisor state
 * (supervisor level > 1), hence the `<= 1` privilege check. */
3771 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H)
3773 #if defined(CONFIG_USER_ONLY)
3774 GEN_EXCP_PRIVOPC(ctx);
3776 /* Restore CPU state */
3777 if (unlikely(ctx->supervisor <= 1)) {
3778 GEN_EXCP_PRIVOPC(ctx);
/* System call: user-mode emulation uses a distinct exception number so
 * the loop can service the syscall itself instead of vectoring. */
3788 #if defined(CONFIG_USER_ONLY)
3789 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3791 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
/* sc: raise a system-call exception; LEV (bits 5..11 of the opcode) is
 * passed as the exception parameter. */
3793 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW)
3797 lev = (ctx->opcode >> 5) & 0x7F;
3798 GEN_EXCP(ctx, POWERPC_SYSCALL, lev);
/* Trap instructions.  Each passes the TO condition field to a helper that
 * compares the operands and may raise a trap exception, so NIP is synced
 * first (the helper cannot restore it).
 * NOTE(review): closing braces and some tcg_temp_free lines (e.g. the free
 * of t0 in twi/tdi) fall in lines missing from this extract. */
/* tw: trap word, register-register comparison. */
3803 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW)
3805 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3806 /* Update the nip since this might generate a trap exception */
3807 gen_update_nip(ctx, ctx->nip);
3808 gen_helper_tw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
3809 tcg_temp_free_i32(t0);
/* twi: trap word, register-immediate comparison (sign-extended SIMM). */
3813 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3815 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3816 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3817 /* Update the nip since this might generate a trap exception */
3818 gen_update_nip(ctx, ctx->nip);
3819 gen_helper_tw(cpu_gpr[rA(ctx->opcode)], t0, t1);
3821 tcg_temp_free_i32(t1);
3824 #if defined(TARGET_PPC64)
/* td: trap doubleword, register-register (64-bit only). */
3826 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B)
3828 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3829 /* Update the nip since this might generate a trap exception */
3830 gen_update_nip(ctx, ctx->nip);
3831 gen_helper_td(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
3832 tcg_temp_free_i32(t0);
/* tdi: trap doubleword, register-immediate (64-bit only). */
3836 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B)
3838 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3839 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3840 /* Update the nip since this might generate a trap exception */
3841 gen_update_nip(ctx, ctx->nip);
3842 gen_helper_td(cpu_gpr[rA(ctx->opcode)], t0, t1);
3844 tcg_temp_free_i32(t1);
3848 /*** Processor control ***/
/* mcrxr: move XER[SO,OV,CA] into CR field crfD, then clear those XER bits.
 * The shift by XER_CA presumably aligns the three flag bits into the low
 * nibble of the CR field — TODO confirm against the XER bit layout. */
3850 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC)
3852 tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], cpu_xer);
3853 tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], XER_CA);
3854 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_SO | 1 << XER_OV | 1 << XER_CA));
/* mfcr / mfocrf: with the one-field form (bit 20 set and a single-bit CRM
 * mask, checked by the power-of-two test crm ^ (crm-1) == 0) only the
 * selected CR field is read; otherwise a helper assembles the full CR.
 * NOTE(review): the declaration of crn and some else/brace lines are
 * missing from this extract. */
3858 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC)
3862 if (likely(ctx->opcode & 0x00100000)) {
3863 crm = CRM(ctx->opcode);
3864 if (likely((crm ^ (crm - 1)) == 0)) {
3866 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
3869 gen_helper_load_cr(cpu_gpr[rD(ctx->opcode)]);
/* mfmsr (supervisor only): copy MSR into rD; privileged-register
 * exception in user mode. */
3874 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC)
3876 #if defined(CONFIG_USER_ONLY)
3877 GEN_EXCP_PRIVREG(ctx);
3879 if (unlikely(!ctx->supervisor)) {
3880 GEN_EXCP_PRIVREG(ctx);
3883 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
/* Sentinel callback marking an SPR as inaccessible.  One build flavour
 * uses a plain sentinel pointer, the other a real function that logs the
 * attempt; the SPR number is byte-swapped within its two 5-bit halves to
 * recover the architectural SPR encoding before printing.
 * NOTE(review): the #if/#else lines selecting between the two definitions
 * are missing from this extract. */
3888 #define SPR_NOACCESS ((void *)(-1UL))
3890 static void spr_noaccess (void *opaque, int sprn)
3892 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
3893 printf("ERROR: try to access SPR %d !\n", sprn);
3895 #define SPR_NOACCESS (&spr_noaccess)
/* gen_op_mfspr: common code for mfspr/mftb.  Picks the read callback for
 * the current privilege level (hypervisor / supervisor / user), invokes it
 * (result is left in cpu_T[0] and copied to rD), and otherwise raises the
 * appropriate exception: a privilege exception for a protected SPR, or an
 * invalid-SPR program exception for an unknown one.  Reading PVR from
 * userland is tolerated silently because Linux does it.
 * NOTE(review): else/brace and #endif lines are missing from this extract. */
3899 static always_inline void gen_op_mfspr (DisasContext *ctx)
3901 void (*read_cb)(void *opaque, int sprn);
3902 uint32_t sprn = SPR(ctx->opcode);
3904 #if !defined(CONFIG_USER_ONLY)
3905 if (ctx->supervisor == 2)
3906 read_cb = ctx->spr_cb[sprn].hea_read;
3907 else if (ctx->supervisor)
3908 read_cb = ctx->spr_cb[sprn].oea_read;
3911 read_cb = ctx->spr_cb[sprn].uea_read;
3912 if (likely(read_cb != NULL)) {
3913 if (likely(read_cb != SPR_NOACCESS)) {
3914 (*read_cb)(ctx, sprn);
3915 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3917 /* Privilege exception */
3918 /* This is a hack to avoid warnings when running Linux:
3919 * this OS breaks the PowerPC virtualisation model,
3920 * allowing userland application to read the PVR
3922 if (sprn != SPR_PVR) {
3923 if (loglevel != 0) {
3924 fprintf(logfile, "Trying to read privileged spr %d %03x at "
3925 ADDRX "\n", sprn, sprn, ctx->nip);
3927 printf("Trying to read privileged spr %d %03x at " ADDRX "\n",
3928 sprn, sprn, ctx->nip);
3930 GEN_EXCP_PRIVREG(ctx);
3934 if (loglevel != 0) {
3935 fprintf(logfile, "Trying to read invalid spr %d %03x at "
3936 ADDRX "\n", sprn, sprn, ctx->nip);
3938 printf("Trying to read invalid spr %d %03x at " ADDRX "\n",
3939 sprn, sprn, ctx->nip);
3940 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
3941 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_SPR);
/* mfspr / mftb: both delegate to gen_op_mfspr above (the body lines with
 * the delegating call are missing from this extract). */
3945 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC)
3951 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB)
/* mtcrf / mtocrf: write rS into the CR.  For the one-field form (bit 20
 * set, or CRM a single-bit mask — power-of-two test crm ^ (crm-1) == 0)
 * only the selected 4-bit CR field is extracted and stored; otherwise a
 * helper scatters rS across all fields selected by CRM.
 * NOTE(review): the declaration of crn and else/brace lines are missing
 * from this extract. */
3957 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC)
3961 crm = CRM(ctx->opcode);
3962 if (likely((ctx->opcode & 0x00100000) || (crm ^ (crm - 1)) == 0)) {
3963 TCGv_i32 temp = tcg_temp_new_i32();
3965 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3966 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
3967 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
3968 tcg_temp_free_i32(temp);
3970 TCGv_i32 temp = tcg_const_i32(crm);
3971 gen_helper_store_cr(cpu_gpr[rS(ctx->opcode)], temp);
3972 tcg_temp_free_i32(temp);
3977 #if defined(TARGET_PPC64)
/* mtmsrd (supervisor only, 64-bit): write rS to the MSR.  The L=1 form
 * (opcode bit 16) touches only MSR[RI,EE] and needs no synchronisation;
 * the full form goes through the store_msr helper, which may enter power-
 * saving mode, so NIP is synced first and translation is stopped.
 * NOTE(review): else/brace/#endif lines are missing from this extract. */
3978 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B)
3980 #if defined(CONFIG_USER_ONLY)
3981 GEN_EXCP_PRIVREG(ctx);
3983 if (unlikely(!ctx->supervisor)) {
3984 GEN_EXCP_PRIVREG(ctx);
3987 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3988 if (ctx->opcode & 0x00010000) {
3989 /* Special form that does not need any synchronisation */
3990 TCGv t0 = tcg_temp_new();
3991 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
3992 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
3993 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
3996 /* XXX: we need to update nip before the store
3997 * if we enter power saving mode, we will exit the loop
3998 * directly from ppc_store_msr
4000 gen_update_nip(ctx, ctx->nip);
4001 gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
4002 /* Must stop the translation as machine state (may have) changed */
4003 /* Note that mtmsr is not always defined as context-synchronizing */
4004 ctx->exception = POWERPC_EXCP_STOP;
/* mtmsr (supervisor only): 32-bit MSR write; same L=1 fast path as
 * mtmsrd.  On a 64-bit CPU in 32-bit mode the upper MSR half is
 * preserved by masking it in before the helper call. */
4010 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC)
4012 #if defined(CONFIG_USER_ONLY)
4013 GEN_EXCP_PRIVREG(ctx);
4015 if (unlikely(!ctx->supervisor)) {
4016 GEN_EXCP_PRIVREG(ctx);
4019 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4020 if (ctx->opcode & 0x00010000) {
4021 /* Special form that does not need any synchronisation */
4022 TCGv t0 = tcg_temp_new();
4023 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
4024 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
4025 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
4028 /* XXX: we need to update nip before the store
4029 * if we enter power saving mode, we will exit the loop
4030 * directly from ppc_store_msr
4032 gen_update_nip(ctx, ctx->nip);
4033 #if defined(TARGET_PPC64)
4034 if (!ctx->sf_mode) {
4035 TCGv t0 = tcg_temp_new();
4036 TCGv t1 = tcg_temp_new();
4037 tcg_gen_andi_tl(t0, cpu_msr, 0xFFFFFFFF00000000ULL);
4038 tcg_gen_ext32u_tl(t1, cpu_gpr[rS(ctx->opcode)]);
4039 tcg_gen_or_tl(t0, t0, t1);
4041 gen_helper_store_msr(t0);
4045 gen_helper_store_msr(cpu_gpr[rS(ctx->opcode)]);
4046 /* Must stop the translation as machine state (may have) changed */
4047 /* Note that mtmsr is not always defined as context-synchronizing */
4048 ctx->exception = POWERPC_EXCP_STOP;
/* mtspr: mirror of gen_op_mfspr for writes.  Selects the write callback
 * for the current privilege level, stages rS in cpu_T[0] for the legacy
 * callback interface, and raises a privileged-register exception for a
 * protected SPR or an invalid-SPR program exception for an unknown one.
 * NOTE(review): else/brace and #endif lines are missing from this extract. */
4054 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC)
4056 void (*write_cb)(void *opaque, int sprn);
4057 uint32_t sprn = SPR(ctx->opcode);
4059 #if !defined(CONFIG_USER_ONLY)
4060 if (ctx->supervisor == 2)
4061 write_cb = ctx->spr_cb[sprn].hea_write;
4062 else if (ctx->supervisor)
4063 write_cb = ctx->spr_cb[sprn].oea_write;
4066 write_cb = ctx->spr_cb[sprn].uea_write;
4067 if (likely(write_cb != NULL)) {
4068 if (likely(write_cb != SPR_NOACCESS)) {
4069 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4070 (*write_cb)(ctx, sprn);
4072 /* Privilege exception */
4073 if (loglevel != 0) {
4074 fprintf(logfile, "Trying to write privileged spr %d %03x at "
4075 ADDRX "\n", sprn, sprn, ctx->nip);
4077 printf("Trying to write privileged spr %d %03x at " ADDRX "\n",
4078 sprn, sprn, ctx->nip);
4079 GEN_EXCP_PRIVREG(ctx);
4083 if (loglevel != 0) {
4084 fprintf(logfile, "Trying to write invalid spr %d %03x at "
4085 ADDRX "\n", sprn, sprn, ctx->nip);
4087 printf("Trying to write invalid spr %d %03x at " ADDRX "\n",
4088 sprn, sprn, ctx->nip);
4089 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
4090 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_SPR);
4094 /*** Cache management ***/
/* dcbf: data cache block flush — emulated as a byte load so the MMU sees
 * the access; the loaded value is discarded. */
4096 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE)
4098 /* XXX: specification says this is treated as a load by the MMU */
4099 TCGv t0 = tcg_temp_new();
4100 gen_set_access_type(ACCESS_CACHE);
4101 gen_addr_reg_index(t0, ctx);
4102 gen_qemu_ld8u(t0, t0, ctx->mem_idx);
4106 /* dcbi (Supervisor only) */
/* dcbi: emulated as a load+store of one byte so the MMU treats it as a
 * store; privileged — user mode raises an exception.
 * NOTE(review): declarations of EA/val as TCGv and the temp frees are in
 * lines missing from this extract. */
4107 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE)
4109 #if defined(CONFIG_USER_ONLY)
4110 GEN_EXCP_PRIVOPC(ctx);
4113 if (unlikely(!ctx->supervisor)) {
4114 GEN_EXCP_PRIVOPC(ctx);
4117 EA = tcg_temp_new();
4118 gen_set_access_type(ACCESS_CACHE);
4119 gen_addr_reg_index(EA, ctx);
4120 val = tcg_temp_new();
4121 /* XXX: specification says this should be treated as a store by the MMU */
4122 gen_qemu_ld8u(val, EA, ctx->mem_idx);
4123 gen_qemu_st8(val, EA, ctx->mem_idx);
/* dcbst: data cache block store — emulated as a discarded byte load. */
4130 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE)
4132 /* XXX: specification say this is treated as a load by the MMU */
4133 TCGv t0 = tcg_temp_new();
4134 gen_set_access_type(ACCESS_CACHE);
4135 gen_addr_reg_index(t0, ctx);
4136 gen_qemu_ld8u(t0, t0, ctx->mem_idx);
/* dcbt / dcbtst: cache touch hints — no-ops in emulation. */
4141 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE)
4143 /* interpreted as no-op */
4144 /* XXX: specification say this is treated as a load by the MMU
4145 * but does not generate any exception
4150 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE)
4152 /* interpreted as no-op */
4153 /* XXX: specification say this is treated as a load by the MMU
4154 * but does not generate any exception
/* dcbz: zero a cache block via helper; the helper can fault, so NIP is
 * restored to this instruction first. */
4159 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03E00001, PPC_CACHE_DCBZ)
4161 TCGv t0 = tcg_temp_new();
4162 gen_addr_reg_index(t0, ctx);
4163 /* NIP cannot be restored if the memory exception comes from an helper */
4164 gen_update_nip(ctx, ctx->nip - 4);
4165 gen_helper_dcbz(t0);
/* dcbz (970 variant): opcode bit 21 selects the regular block size vs.
 * the 970-specific one. */
4169 GEN_HANDLER2(dcbz_970, "dcbz", 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZT)
4171 TCGv t0 = tcg_temp_new();
4172 gen_addr_reg_index(t0, ctx);
4173 /* NIP cannot be restored if the memory exception comes from an helper */
4174 gen_update_nip(ctx, ctx->nip - 4);
4175 if (ctx->opcode & 0x00200000)
4176 gen_helper_dcbz(t0);
4178 gen_helper_dcbz_970(t0);
/* icbi: instruction cache block invalidate via helper (may fault). */
4183 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI)
4185 TCGv t0 = tcg_temp_new();
4186 /* NIP cannot be restored if the memory exception comes from an helper */
4187 gen_update_nip(ctx, ctx->nip - 4);
4188 gen_addr_reg_index(t0, ctx);
4189 gen_helper_icbi(t0);
/* dcba: allocate hint — no-op in emulation. */
4195 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA)
4197 /* interpreted as no-op */
4198 /* XXX: specification say this is treated as a store by the MMU
4199 * but does not generate any exception
4203 /*** Segment register manipulation ***/
4204 /* Supervisor only: */
/* mfsr: read segment register SR(opcode) into rD.  All four handlers in
 * this group use the legacy cpu_T[0]/cpu_T[1] calling convention with a
 * load/store-SR op whose emitting line is missing from this extract; all
 * are privileged. */
4206 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT)
4208 #if defined(CONFIG_USER_ONLY)
4209 GEN_EXCP_PRIVREG(ctx);
4211 if (unlikely(!ctx->supervisor)) {
4212 GEN_EXCP_PRIVREG(ctx);
4215 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
4217 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
/* mfsrin: like mfsr but the segment number comes from rB. */
4222 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT)
4224 #if defined(CONFIG_USER_ONLY)
4225 GEN_EXCP_PRIVREG(ctx);
4227 if (unlikely(!ctx->supervisor)) {
4228 GEN_EXCP_PRIVREG(ctx);
4231 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4234 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
/* mtsr: write rS into segment register SR(opcode). */
4239 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT)
4241 #if defined(CONFIG_USER_ONLY)
4242 GEN_EXCP_PRIVREG(ctx);
4244 if (unlikely(!ctx->supervisor)) {
4245 GEN_EXCP_PRIVREG(ctx);
4248 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4249 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
/* mtsrin: like mtsr but the segment number comes from rB. */
4255 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT)
4257 #if defined(CONFIG_USER_ONLY)
4258 GEN_EXCP_PRIVREG(ctx);
4260 if (unlikely(!ctx->supervisor)) {
4261 GEN_EXCP_PRIVREG(ctx);
4264 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4265 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4271 #if defined(TARGET_PPC64)
4272 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
/* 64-bit "bridge" variants of the four segment-register instructions:
 * identical structure to the 32-bit handlers above, but registered under
 * PPC_SEGMENT_64B so they dispatch to SLB-backed ops (the op-emitting
 * lines are missing from this extract). */
4274 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B)
4276 #if defined(CONFIG_USER_ONLY)
4277 GEN_EXCP_PRIVREG(ctx);
4279 if (unlikely(!ctx->supervisor)) {
4280 GEN_EXCP_PRIVREG(ctx);
4283 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
4285 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4290 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
4293 #if defined(CONFIG_USER_ONLY)
4294 GEN_EXCP_PRIVREG(ctx);
4296 if (unlikely(!ctx->supervisor)) {
4297 GEN_EXCP_PRIVREG(ctx);
4300 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4303 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4308 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B)
4310 #if defined(CONFIG_USER_ONLY)
4311 GEN_EXCP_PRIVREG(ctx);
4313 if (unlikely(!ctx->supervisor)) {
4314 GEN_EXCP_PRIVREG(ctx);
4317 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4318 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
4324 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
4327 #if defined(CONFIG_USER_ONLY)
4328 GEN_EXCP_PRIVREG(ctx);
4330 if (unlikely(!ctx->supervisor)) {
4331 GEN_EXCP_PRIVREG(ctx);
4334 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4335 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4340 #endif /* defined(TARGET_PPC64) */
4342 /*** Lookaside buffer management ***/
4343 /* Optional & supervisor only: */
/* tlbia: invalidate all TLB entries; privileged (the op-emitting lines
 * are missing from this extract, as in the rest of this group). */
4345 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA)
4347 #if defined(CONFIG_USER_ONLY)
4348 GEN_EXCP_PRIVOPC(ctx);
4350 if (unlikely(!ctx->supervisor)) {
4351 GEN_EXCP_PRIVOPC(ctx);
/* tlbie: invalidate the TLB entry for the effective address in rB. */
4359 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE)
4361 #if defined(CONFIG_USER_ONLY)
4362 GEN_EXCP_PRIVOPC(ctx);
4364 if (unlikely(!ctx->supervisor)) {
4365 GEN_EXCP_PRIVOPC(ctx);
4368 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4369 #if defined(TARGET_PPC64)
/* tlbsync: ordering barrier for previous tlbie — nothing to do here. */
4379 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC)
4381 #if defined(CONFIG_USER_ONLY)
4382 GEN_EXCP_PRIVOPC(ctx);
4384 if (unlikely(!ctx->supervisor)) {
4385 GEN_EXCP_PRIVOPC(ctx);
4388 /* This has no effect: it should ensure that all previous
4389 * tlbie have completed
4395 #if defined(TARGET_PPC64)
/* slbia / slbie: 64-bit SLB invalidate-all / invalidate-entry(rB). */
4397 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI)
4399 #if defined(CONFIG_USER_ONLY)
4400 GEN_EXCP_PRIVOPC(ctx);
4402 if (unlikely(!ctx->supervisor)) {
4403 GEN_EXCP_PRIVOPC(ctx);
4411 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI)
4413 #if defined(CONFIG_USER_ONLY)
4414 GEN_EXCP_PRIVOPC(ctx);
4416 if (unlikely(!ctx->supervisor)) {
4417 GEN_EXCP_PRIVOPC(ctx);
4420 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4426 /*** External control ***/
/* eciwx: external-control in word indexed — word load from (rA|0)+rB with
 * alignment check; EAR[E] gating is not implemented (see XXX comment). */
4429 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN)
4431 /* Should check EAR[E] ! */
4432 TCGv t0 = tcg_temp_new();
4433 gen_set_access_type(ACCESS_RES);
4434 gen_addr_reg_index(t0, ctx);
4435 gen_check_align(ctx, t0, 0x03);
4436 gen_qemu_ld32u(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx);
/* ecowx: external-control out word indexed — the store counterpart. */
4441 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN)
4443 /* Should check EAR[E] ! */
4444 TCGv t0 = tcg_temp_new()
4445 gen_set_access_type(ACCESS_RES);
4446 gen_addr_reg_index(t0, ctx);
4447 gen_check_align(ctx, t0, 0x03);
4448 gen_qemu_st32(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx);
4452 /* PowerPC 601 specific instructions */
/* abs: rD = |rA| via branch: negate when rA < 0, copy otherwise.
 * NOTE(review): label-set lines (gen_set_label) and closing braces are
 * missing from this extract throughout the 601 handlers. */
4454 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR)
4456 int l1 = gen_new_label();
4457 int l2 = gen_new_label();
4458 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
4459 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4462 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4464 if (unlikely(Rc(ctx->opcode) != 0))
4465 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* abso: abs with overflow — only |INT_MIN| (0x80000000) overflows, in
 * which case XER[OV,SO] are set and the value is left unchanged. */
4469 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR)
4471 int l1 = gen_new_label();
4472 int l2 = gen_new_label();
4473 int l3 = gen_new_label();
4474 /* Start with XER OV disabled, the most likely case */
4475 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4476 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
4477 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
4478 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4481 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4484 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4486 if (unlikely(Rc(ctx->opcode) != 0))
4487 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* clcs (601): cache line compute size — the rA field selects which cache
 * parameter the helper returns into rD. */
4491 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR)
4493 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
4494 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], t0);
4495 tcg_temp_free_i32(t0);
4496 /* Rc=1 sets CR0 to an undefined state */
/* 601 divide family: all four delegate to helpers (which use the MQ
 * register per the POWER architecture); the `o` variants also track
 * overflow in XER.  Rc=1 sets CR0 from the result. */
4500 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR)
4502 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4503 if (unlikely(Rc(ctx->opcode) != 0))
4504 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4508 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR)
4510 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4511 if (unlikely(Rc(ctx->opcode) != 0))
4512 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4516 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR)
4518 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4519 if (unlikely(Rc(ctx->opcode) != 0))
4520 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4523 /* divso - divso. */
4524 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR)
4526 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4527 if (unlikely(Rc(ctx->opcode) != 0))
4528 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* doz: difference-or-zero — rD = (rB >= rA) ? rB - rA : 0. */
4532 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR)
4534 int l1 = gen_new_label();
4535 int l2 = gen_new_label();
4536 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4537 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4540 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4542 if (unlikely(Rc(ctx->opcode) != 0))
4543 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* dozo: doz with overflow detection on the subtraction — the xor/andc
 * sequence computes the signed-overflow predicate into t1's sign bit and
 * sets XER[OV,SO] when it is negative. */
4547 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR)
4549 int l1 = gen_new_label();
4550 int l2 = gen_new_label();
4551 TCGv t0 = tcg_temp_new();
4552 TCGv t1 = tcg_temp_new();
4553 TCGv t2 = tcg_temp_new();
4554 /* Start with XER OV disabled, the most likely case */
4555 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4556 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4557 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4558 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4559 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
4560 tcg_gen_andc_tl(t1, t1, t2);
4561 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
4562 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4563 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4566 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4571 if (unlikely(Rc(ctx->opcode) != 0))
4572 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* dozi: immediate form — rD = (rA < SIMM) ? SIMM - rA : 0. */
4576 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR)
4578 target_long simm = SIMM(ctx->opcode);
4579 int l1 = gen_new_label();
4580 int l2 = gen_new_label();
4581 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
4582 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
4585 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4587 if (unlikely(Rc(ctx->opcode) != 0))
4588 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4591 /* lscbx - lscbx. */
/* lscbx (601): load string and compare byte indexed.  Helper does the
 * byte-by-byte load-and-compare; the byte count it returns is merged into
 * the low 7 bits of XER.  NIP is restored first because the helper's
 * memory accesses may fault.  Rc=1 sets CR0 from the count, not rD. */
4592 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR)
4594 TCGv t0 = tcg_temp_new();
4595 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
4596 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
4597 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
4599 gen_addr_reg_index(t0, ctx);
4600 /* NIP cannot be restored if the memory exception comes from an helper */
4601 gen_update_nip(ctx, ctx->nip - 4);
4602 gen_helper_lscbx(t0, t0, t1, t2, t3);
4603 tcg_temp_free_i32(t1);
4604 tcg_temp_free_i32(t2);
4605 tcg_temp_free_i32(t3);
4606 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
4607 tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
4608 if (unlikely(Rc(ctx->opcode) != 0))
4609 gen_set_Rc0(ctx, t0);
4613 /* maskg - maskg. */
/* maskg (601): generate a mask of ones between the bit positions given by
 * rS (start) and rB (end); when start > end the mask wraps and the xor
 * result is negated.  NOTE(review): label placement and temp frees fall in
 * lines missing from this extract. */
4614 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR)
4616 int l1 = gen_new_label();
4617 TCGv t0 = tcg_temp_new();
4618 TCGv t1 = tcg_temp_new();
4619 TCGv t2 = tcg_temp_new();
4620 TCGv t3 = tcg_temp_new();
4621 tcg_gen_movi_tl(t3, 0xFFFFFFFF);
4622 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4623 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
4624 tcg_gen_addi_tl(t2, t0, 1);
4625 tcg_gen_shr_tl(t2, t3, t2);
4626 tcg_gen_shr_tl(t3, t3, t1);
4627 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
4628 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
4629 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4635 if (unlikely(Rc(ctx->opcode) != 0))
4636 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4639 /* maskir - maskir. */
/* maskir (601): bitwise merge — rA = (rS & rB) | (rA & ~rB). */
4640 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR)
4642 TCGv t0 = tcg_temp_new();
4643 TCGv t1 = tcg_temp_new();
4644 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4645 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4646 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4649 if (unlikely(Rc(ctx->opcode) != 0))
4650 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* mul (601): 32x32->64 multiply; low half goes to the MQ SPR, high half
 * to rD.  Done in 64-bit TCG temps regardless of target width. */
4654 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR)
4656 TCGv_i64 t0 = tcg_temp_new_i64();
4657 TCGv_i64 t1 = tcg_temp_new_i64();
4658 TCGv t2 = tcg_temp_new();
4659 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4660 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4661 tcg_gen_mul_i64(t0, t0, t1);
4662 tcg_gen_trunc_i64_tl(t2, t0);
4663 gen_store_spr(SPR_MQ, t2);
4664 tcg_gen_shri_i64(t1, t0, 32);
4665 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4666 tcg_temp_free_i64(t0);
4667 tcg_temp_free_i64(t1);
4669 if (unlikely(Rc(ctx->opcode) != 0))
4670 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* mulo: as mul, but sets XER[OV,SO] when the 64-bit product does not fit
 * in a sign-extended 32-bit value (compare against its ext32s form). */
4674 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR)
4676 int l1 = gen_new_label();
4677 TCGv_i64 t0 = tcg_temp_new_i64();
4678 TCGv_i64 t1 = tcg_temp_new_i64();
4679 TCGv t2 = tcg_temp_new();
4680 /* Start with XER OV disabled, the most likely case */
4681 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4682 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4683 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4684 tcg_gen_mul_i64(t0, t0, t1);
4685 tcg_gen_trunc_i64_tl(t2, t0);
4686 gen_store_spr(SPR_MQ, t2);
4687 tcg_gen_shri_i64(t1, t0, 32);
4688 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4689 tcg_gen_ext32s_i64(t1, t0);
4690 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
4691 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
4693 tcg_temp_free_i64(t0);
4694 tcg_temp_free_i64(t1);
4696 if (unlikely(Rc(ctx->opcode) != 0))
4697 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* nabs (601): rD = -|rA| — copy when rA <= 0, negate when rA > 0. */
4701 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR)
4703 int l1 = gen_new_label();
4704 int l2 = gen_new_label();
4705 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4706 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4709 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4711 if (unlikely(Rc(ctx->opcode) != 0))
4712 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4715 /* nabso - nabso. */
/* nabso: same computation; -|x| can never overflow, so XER[OV] is simply
 * cleared. */
4716 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR)
4718 int l1 = gen_new_label();
4719 int l2 = gen_new_label();
4720 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4721 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4724 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4726 /* nabs never overflows */
4727 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
4728 if (unlikely(Rc(ctx->opcode) != 0))
4729 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
/* rlmi (601): rotate left by rB[0:4] then insert under MASK(mb,me) —
 * masked bits come from the rotated rS, the rest keep rA's old value. */
4733 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR)
4735 uint32_t mb = MB(ctx->opcode);
4736 uint32_t me = ME(ctx->opcode);
4737 TCGv t0 = tcg_temp_new();
4738 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4739 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4740 tcg_gen_andi_tl(t0, t0, MASK(mb, me));
4741 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
4742 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
4744 if (unlikely(Rc(ctx->opcode) != 0))
4745 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* rrib (601): rotate-right-and-insert-bit — bit 0 of rS is shifted right
 * by rB[0:4] and inserted into rA at that position (mask 0x80000000>>n). */
4749 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR)
4751 TCGv t0 = tcg_temp_new();
4752 TCGv t1 = tcg_temp_new();
4753 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4754 tcg_gen_movi_tl(t1, 0x80000000);
4755 tcg_gen_shr_tl(t1, t1, t0);
4756 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4757 tcg_gen_and_tl(t0, t0, t1);
4758 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
4759 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4762 if (unlikely(Rc(ctx->opcode) != 0))
4763 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* 601 MQ-based shifts.  Common pattern: the shifted-out bits (a rotate of
 * rS) are saved in the MQ SPR, the plain shift result goes to rA.
 * NOTE(review): temp frees and closing braces are in lines missing from
 * this extract. */
/* sle: shift left with MQ — rA = rS << n, MQ = rotl(rS, n). */
4767 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR)
4769 TCGv t0 = tcg_temp_new();
4770 TCGv t1 = tcg_temp_new();
4771 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4772 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4773 tcg_gen_subfi_tl(t1, 32, t1);
4774 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4775 tcg_gen_or_tl(t1, t0, t1);
4776 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4777 gen_store_spr(SPR_MQ, t1);
4780 if (unlikely(Rc(ctx->opcode) != 0))
4781 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sleq: shift left with MQ merge — high bits from rotl(rS,n), low bits
 * from the previous MQ, new MQ = rotl(rS, n). */
4785 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR)
4787 TCGv t0 = tcg_temp_new();
4788 TCGv t1 = tcg_temp_new();
4789 TCGv t2 = tcg_temp_new();
4790 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4791 tcg_gen_movi_tl(t2, 0xFFFFFFFF);
4792 tcg_gen_shl_tl(t2, t2, t0);
4793 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
4794 gen_load_spr(t1, SPR_MQ);
4795 gen_store_spr(SPR_MQ, t0);
4796 tcg_gen_and_tl(t0, t0, t2);
4797 tcg_gen_andc_tl(t1, t1, t2);
4798 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4802 if (unlikely(Rc(ctx->opcode) != 0))
4803 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sliq: immediate-shift variant of sle (sh from the opcode). */
4807 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR)
4809 int sh = SH(ctx->opcode);
4810 TCGv t0 = tcg_temp_new();
4811 TCGv t1 = tcg_temp_new();
4812 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4813 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4814 tcg_gen_or_tl(t1, t0, t1);
4815 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4816 gen_store_spr(SPR_MQ, t1);
4819 if (unlikely(Rc(ctx->opcode) != 0))
4820 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4823 /* slliq - slliq. */
/* slliq: immediate-shift variant of sleq (merge with previous MQ). */
4824 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR)
4826 int sh = SH(ctx->opcode);
4827 TCGv t0 = tcg_temp_new();
4828 TCGv t1 = tcg_temp_new();
4829 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4830 gen_load_spr(t1, SPR_MQ);
4831 gen_store_spr(SPR_MQ, t0);
4832 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
4833 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
4834 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4837 if (unlikely(Rc(ctx->opcode) != 0))
4838 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sllq: shift left long with MQ.  rB bit 5 selects between taking the
 * (masked) MQ directly and merging the shifted rS with MQ under the
 * mask; local temps because control flow crosses the brcond. */
4842 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR)
4844 int l1 = gen_new_label();
4845 int l2 = gen_new_label();
4846 TCGv t0 = tcg_temp_local_new();
4847 TCGv t1 = tcg_temp_local_new();
4848 TCGv t2 = tcg_temp_local_new();
4849 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4850 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4851 tcg_gen_shl_tl(t1, t1, t2);
4852 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4853 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
4854 gen_load_spr(t0, SPR_MQ);
4855 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4858 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4859 gen_load_spr(t2, SPR_MQ)
4860 tcg_gen_andc_tl(t1, t2, t1);
4861 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4866 if (unlikely(Rc(ctx->opcode) != 0))
4867 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* slq: shift left with MQ; result is forced to 0 when rB bit 5 is set
 * (shift amount >= 32). */
4871 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR)
4873 int l1 = gen_new_label();
4874 TCGv t0 = tcg_temp_new();
4875 TCGv t1 = tcg_temp_new();
4876 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4877 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4878 tcg_gen_subfi_tl(t1, 32, t1);
4879 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4880 tcg_gen_or_tl(t1, t0, t1);
4881 gen_store_spr(SPR_MQ, t1);
4882 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
4883 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4884 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4885 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
4889 if (unlikely(Rc(ctx->opcode) != 0))
4890 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4893 /* sraiq - sraiq. */
/* sraiq: shift right algebraic immediate with MQ.  MQ gets the rotated
 * source; XER[CA] is set only when the value is negative AND one-bits
 * were shifted out (t1 holds the shifted-out bits). */
4894 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR)
4896 int sh = SH(ctx->opcode);
4897 int l1 = gen_new_label();
4898 TCGv t0 = tcg_temp_new();
4899 TCGv t1 = tcg_temp_new();
4900 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
4901 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
4902 tcg_gen_or_tl(t0, t0, t1);
4903 gen_store_spr(SPR_MQ, t0);
4904 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
4905 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4906 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
4907 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4909 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
4912 if (unlikely(Rc(ctx->opcode) != 0))
4913 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sraq: register-count variant; when rB bit 5 is set the result becomes
 * a pure sign fill (sari by 31) and t2 tracks shifted-out bits for CA.
 * NOTE(review): several label/brace lines are missing from this extract,
 * so the exact branch structure cannot be fully confirmed here. */
4917 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR)
4919 int l1 = gen_new_label();
4920 int l2 = gen_new_label();
4921 TCGv t0 = tcg_temp_new();
4922 TCGv t1 = tcg_temp_local_new();
4923 TCGv t2 = tcg_temp_local_new();
4924 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
4925 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
4926 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
4927 tcg_gen_subfi_tl(t2, 32, t2);
4928 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
4929 tcg_gen_or_tl(t0, t0, t2);
4930 gen_store_spr(SPR_MQ, t0);
4931 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
4932 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1);
4933 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
4934 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
4937 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
4938 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
4939 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4940 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
4941 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_CA));
4945 if (unlikely(Rc(ctx->opcode) != 0))
4946 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sre - sre.: POWER "shift right extended".  rA = rS >> sh; MQ receives
 * the 32-bit rotate of rS (shifted-out bits in the high part). */
4950 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR)
4952 TCGv t0 = tcg_temp_new();
4953 TCGv t1 = tcg_temp_new();
4954 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4955 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
/* rotate = (rS >> sh) | (rS << (32 - sh)) */
4956 tcg_gen_subfi_tl(t1, 32, t1);
4957 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
4958 tcg_gen_or_tl(t1, t0, t1);
4959 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
4960 gen_store_spr(SPR_MQ, t1);
4963 if (unlikely(Rc(ctx->opcode) != 0))
4964 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* srea - srea.: POWER "shift right extended algebraic".  MQ receives the
 * rotate of rS; rA receives the arithmetic right shift. */
4968 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR)
4970 TCGv t0 = tcg_temp_new();
4971 TCGv t1 = tcg_temp_new();
4972 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
4973 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
4974 gen_store_spr(SPR_MQ, t0);
4975 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
4978 if (unlikely(Rc(ctx->opcode) != 0))
4979 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sreq - sreq.: POWER "shift right extended with MQ".  The rotated rS
 * replaces MQ; rA merges the new rotate (under the shift mask) with the
 * OLD MQ contents (outside the mask). */
4983 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR)
4985 TCGv t0 = tcg_temp_new();
4986 TCGv t1 = tcg_temp_new();
4987 TCGv t2 = tcg_temp_new();
4988 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
/* t1 = right-shift mask 0xFFFFFFFF >> sh */
4989 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
4990 tcg_gen_shr_tl(t1, t1, t0);
4991 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
/* read old MQ before overwriting it with the rotate */
4992 gen_load_spr(t2, SPR_MQ);
4993 gen_store_spr(SPR_MQ, t0);
4994 tcg_gen_and_tl(t0, t0, t1);
4995 tcg_gen_andc_tl(t2, t2, t1);
4996 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
5000 if (unlikely(Rc(ctx->opcode) != 0))
5001 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* sriq - sriq.: POWER "shift right immediate with MQ".  rA = rS >> sh;
 * MQ receives the 32-bit rotate of rS. */
5005 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR)
5007 int sh = SH(ctx->opcode);
5008 TCGv t0 = tcg_temp_new();
5009 TCGv t1 = tcg_temp_new();
5010 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
/* NOTE(review): sh == 0 makes this a shift by 32 — fine for 64-bit
 * target_ulong; verify for 32-bit targets. */
5011 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
5012 tcg_gen_or_tl(t1, t0, t1);
5013 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5014 gen_store_spr(SPR_MQ, t1);
5017 if (unlikely(Rc(ctx->opcode) != 0))
5018 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* srliq - srliq.: POWER "shift right long immediate with MQ".  The rotate
 * of rS replaces MQ; rA merges the rotate (under the mask) with the old
 * MQ contents (outside the mask). */
5022 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR)
5024 int sh = SH(ctx->opcode);
5025 TCGv t0 = tcg_temp_new();
5026 TCGv t1 = tcg_temp_new();
5027 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
/* old MQ must be read before the new rotate is stored */
5028 gen_load_spr(t1, SPR_MQ);
5029 gen_store_spr(SPR_MQ, t0);
5030 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
5031 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
5032 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5035 if (unlikely(Rc(ctx->opcode) != 0))
5036 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
/* srlq - srlq.: POWER "shift right long with MQ".  If the shift amount is
 * >= 32 (bit 0x20 of rB), the result is MQ masked by the shift mask;
 * otherwise the shifted rS is merged with MQ outside the mask. */
5040 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR)
5042 int l1 = gen_new_label();
5043 int l2 = gen_new_label();
5044 TCGv t0 = tcg_temp_local_new();
5045 TCGv t1 = tcg_temp_local_new();
5046 TCGv t2 = tcg_temp_local_new();
5047 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
/* t2 becomes the right-shift mask 0xFFFFFFFF >> sh */
5048 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5049 tcg_gen_shr_tl(t2, t1, t2);
/* t0 = "shift >= 32" flag */
5050 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5051 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5052 gen_load_spr(t0, SPR_MQ);
5053 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
/* shift < 32 path: shifted rS merged with MQ bits outside the mask.
 * NOTE(review): the shift here uses t2 (the mask) as the shift count —
 * looks suspicious; the original shift count was overwritten at 5049.
 * TODO confirm against upstream. */
5056 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5057 tcg_gen_and_tl(t0, t0, t2);
5058 gen_load_spr(t1, SPR_MQ);
5059 tcg_gen_andc_tl(t1, t1, t2);
5060 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5065 if (unlikely(Rc(ctx->opcode) != 0))
5066 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5070 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR)
5072 int l1 = gen_new_label();
5073 TCGv t0 = tcg_temp_new();
5074 TCGv t1 = tcg_temp_new();
5075 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5076 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5077 tcg_gen_subfi_tl(t1, 32, t1);
5078 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5079 tcg_gen_or_tl(t1, t0, t1);
5080 gen_store_spr(SPR_MQ, t1);
5081 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
5082 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5083 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5084 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
5088 if (unlikely(Rc(ctx->opcode) != 0))
5089 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5092 /* PowerPC 602 specific instructions */
/* dsa: not modelled — always raises an invalid-instruction exception */
5094 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC)
5097 GEN_EXCP_INVAL(ctx);
/* esa: not modelled — always raises an invalid-instruction exception */
5101 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC)
5104 GEN_EXCP_INVAL(ctx);
/* mfrom: supervisor-only; delegates the ROM table lookup to a helper */
5108 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC)
5110 #if defined(CONFIG_USER_ONLY)
5111 GEN_EXCP_PRIVOPC(ctx);
5113 if (unlikely(!ctx->supervisor)) {
5114 GEN_EXCP_PRIVOPC(ctx);
5117 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5121 /* 602 - 603 - G2 TLB management */
/* tlbld (6xx): supervisor-only data-TLB load via helper, address in rB */
5123 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB)
5125 #if defined(CONFIG_USER_ONLY)
5126 GEN_EXCP_PRIVOPC(ctx);
5128 if (unlikely(!ctx->supervisor)) {
5129 GEN_EXCP_PRIVOPC(ctx);
5132 gen_helper_load_6xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
/* tlbli (6xx): supervisor-only instruction-TLB load via helper */
5137 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB)
5139 #if defined(CONFIG_USER_ONLY)
5140 GEN_EXCP_PRIVOPC(ctx);
5142 if (unlikely(!ctx->supervisor)) {
5143 GEN_EXCP_PRIVOPC(ctx);
5146 gen_helper_load_6xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5150 /* 74xx TLB management */
/* tlbld (74xx): same pattern as the 6xx variant, different helper */
5152 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB)
5154 #if defined(CONFIG_USER_ONLY)
5155 GEN_EXCP_PRIVOPC(ctx);
5157 if (unlikely(!ctx->supervisor)) {
5158 GEN_EXCP_PRIVOPC(ctx);
5161 gen_helper_load_74xx_tlbd(cpu_gpr[rB(ctx->opcode)]);
/* tlbli (74xx) */
5166 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB)
5168 #if defined(CONFIG_USER_ONLY)
5169 GEN_EXCP_PRIVOPC(ctx);
5171 if (unlikely(!ctx->supervisor)) {
5172 GEN_EXCP_PRIVOPC(ctx);
5175 gen_helper_load_74xx_tlbi(cpu_gpr[rB(ctx->opcode)]);
5179 /* POWER instructions not in PowerPC 601 */
/* clf: no cache model, so nothing to generate */
5181 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER)
5183 /* Cache line flush: implemented as no-op */
/* cli: privileged; after the supervisor check, a no-op */
5187 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER)
5189 /* Cache line invalidate: privileged and treated as no-op */
5190 #if defined(CONFIG_USER_ONLY)
5191 GEN_EXCP_PRIVOPC(ctx);
5193 if (unlikely(!ctx->supervisor)) {
5194 GEN_EXCP_PRIVOPC(ctx);
/* dclst: unprivileged cache hint, no-op without a cache model */
5201 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER)
5203 /* Data cache line store: treated as no-op */
/* mfsri: POWER "move from segment register indirect" (supervisor only).
 * Computes the effective address, runs the legacy micro-op, then copies
 * the results from the cpu_T scratch registers into rD (and rA when it
 * differs from rD and is non-zero). */
5206 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER)
5208 #if defined(CONFIG_USER_ONLY)
5209 GEN_EXCP_PRIVOPC(ctx);
5211 if (unlikely(!ctx->supervisor)) {
5212 GEN_EXCP_PRIVOPC(ctx);
5215 int ra = rA(ctx->opcode);
5216 int rd = rD(ctx->opcode);
5218 gen_addr_reg_index(cpu_T[0], ctx);
/* legacy (pre-TCG) micro-op: reads cpu_T[0], writes cpu_T[0]/cpu_T[1] */
5219 gen_op_POWER_mfsri();
5220 tcg_gen_mov_tl(cpu_gpr[rd], cpu_T[0]);
5221 if (ra != 0 && ra != rd)
5222 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[1]);
/* rac: POWER "real address compute" (supervisor only); EA -> helper */
5226 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER)
5228 #if defined(CONFIG_USER_ONLY)
5229 GEN_EXCP_PRIVOPC(ctx);
5232 if (unlikely(!ctx->supervisor)) {
5233 GEN_EXCP_PRIVOPC(ctx);
5236 t0 = tcg_temp_new();
5237 gen_addr_reg_index(t0, ctx);
5238 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], t0);
/* rfsvc: return from service call (supervisor only).
 * NOTE(review): the inval mask 0x03FFF0001 has 9 hex digits (> 32 bits)
 * and will be truncated — looks like a typo for 0x03FFF001; confirm
 * against upstream before changing. */
5243 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER)
5245 #if defined(CONFIG_USER_ONLY)
5246 GEN_EXCP_PRIVOPC(ctx);
5248 if (unlikely(!ctx->supervisor)) {
5249 GEN_EXCP_PRIVOPC(ctx);
5257 /* svc is not implemented for now */
5259 /* POWER2 specific instructions */
5260 /* Quad manipulation (load/store two floats at a time) */
/* lfq: load two consecutive FPRs from EA and EA+8; register number wraps
 * modulo 32 */
5263 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
5265 int rd = rD(ctx->opcode);
5266 TCGv t0 = tcg_temp_new();
5267 gen_addr_imm_index(t0, ctx, 0);
5268 gen_qemu_ld64(cpu_fpr[rd], t0, ctx->mem_idx);
5269 tcg_gen_addi_tl(t0, t0, 8);
5270 gen_qemu_ld64(cpu_fpr[(rd + 1) % 32], t0, ctx->mem_idx);
/* lfqu: as lfq, but with update — rA receives the base EA afterwards */
5275 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
5277 int ra = rA(ctx->opcode);
5278 int rd = rD(ctx->opcode);
5279 TCGv t0 = tcg_temp_new();
5280 TCGv t1 = tcg_temp_new();
5281 gen_addr_imm_index(t0, ctx, 0);
5282 gen_qemu_ld64(cpu_fpr[rd], t0, ctx->mem_idx);
/* second load uses a separate temp so t0 keeps the EA for the update */
5283 tcg_gen_addi_tl(t1, t0, 8);
5284 gen_qemu_ld64(cpu_fpr[(rd + 1) % 32], t1, ctx->mem_idx);
5286 tcg_gen_mov_tl(cpu_gpr[ra], t0);
/* lfqux: indexed-with-update form */
5292 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2)
5294 int ra = rA(ctx->opcode);
5295 int rd = rD(ctx->opcode);
5296 TCGv t0 = tcg_temp_new();
5297 TCGv t1 = tcg_temp_new();
5298 gen_addr_reg_index(t0, ctx);
5299 gen_qemu_ld64(cpu_fpr[rd], t0, ctx->mem_idx);
5300 tcg_gen_addi_tl(t1, t0, 8);
5301 gen_qemu_ld64(cpu_fpr[(rd + 1) % 32], t1, ctx->mem_idx);
5303 tcg_gen_mov_tl(cpu_gpr[ra], t0);
/* lfqx: indexed form, no update */
5309 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2)
5311 int rd = rD(ctx->opcode);
5312 TCGv t0 = tcg_temp_new();
5313 gen_addr_reg_index(t0, ctx);
5314 gen_qemu_ld64(cpu_fpr[rd], t0, ctx->mem_idx);
5315 tcg_gen_addi_tl(t0, t0, 8);
5316 gen_qemu_ld64(cpu_fpr[(rd + 1) % 32], t0, ctx->mem_idx);
/* stfq: store two consecutive FPRs to EA and EA+8 (POWER2 quad store) */
5321 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
5323 int rd = rD(ctx->opcode);
5324 TCGv t0 = tcg_temp_new();
5325 gen_addr_imm_index(t0, ctx, 0);
5326 gen_qemu_st64(cpu_fpr[rd], t0, ctx->mem_idx);
5327 tcg_gen_addi_tl(t0, t0, 8);
5328 gen_qemu_st64(cpu_fpr[(rd + 1) % 32], t0, ctx->mem_idx);
/* stfqu: as stfq, with update of rA to the base EA */
5333 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
5335 int ra = rA(ctx->opcode);
5336 int rd = rD(ctx->opcode);
5337 TCGv t0 = tcg_temp_new();
5338 TCGv t1 = tcg_temp_new();
5339 gen_addr_imm_index(t0, ctx, 0);
5340 gen_qemu_st64(cpu_fpr[rd], t0, ctx->mem_idx);
/* keep t0 unchanged for the register update */
5341 tcg_gen_addi_tl(t1, t0, 8);
5342 gen_qemu_st64(cpu_fpr[(rd + 1) % 32], t1, ctx->mem_idx);
5344 tcg_gen_mov_tl(cpu_gpr[ra], t0);
/* stfqux: indexed-with-update form */
5350 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2)
5352 int ra = rA(ctx->opcode);
5353 int rd = rD(ctx->opcode);
5354 TCGv t0 = tcg_temp_new();
5355 TCGv t1 = tcg_temp_new();
5356 gen_addr_reg_index(t0, ctx);
5357 gen_qemu_st64(cpu_fpr[rd], t0, ctx->mem_idx);
5358 tcg_gen_addi_tl(t1, t0, 8);
5359 gen_qemu_st64(cpu_fpr[(rd + 1) % 32], t1, ctx->mem_idx);
5361 tcg_gen_mov_tl(cpu_gpr[ra], t0);
/* stfqx: indexed form, no update */
5367 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2)
5369 int rd = rD(ctx->opcode);
5370 TCGv t0 = tcg_temp_new();
5371 gen_addr_reg_index(t0, ctx);
5372 gen_qemu_st64(cpu_fpr[rd], t0, ctx->mem_idx);
5373 tcg_gen_addi_tl(t0, t0, 8);
5374 gen_qemu_st64(cpu_fpr[(rd + 1) % 32], t0, ctx->mem_idx);
5378 /* BookE specific instructions */
5379 /* XXX: not implemented on 440 ? */
/* mfapidi: not modelled — raises invalid-instruction */
5380 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI)
5383 GEN_EXCP_INVAL(ctx);
5386 /* XXX: not implemented on 440 ? */
/* tlbiva: supervisor-only TLB invalidate by virtual address; EA is
 * computed into the legacy cpu_T[0] scratch and handled like tlbie */
5387 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA)
5389 #if defined(CONFIG_USER_ONLY)
5390 GEN_EXCP_PRIVOPC(ctx);
5392 if (unlikely(!ctx->supervisor)) {
5393 GEN_EXCP_PRIVOPC(ctx);
5396 gen_addr_reg_index(cpu_T[0], ctx);
5397 /* Use the same micro-ops as for tlbie */
5398 #if defined(TARGET_PPC64)
5407 /* All 405 MAC instructions are translated here */
/* Common translator for the PPC 405 multiply-accumulate family.
 * opc3 bits select operand halves/signedness (switch below), whether to
 * accumulate or negate-accumulate, and whether to saturate and/or set
 * XER[OV].  t0/t1 hold the two 16-bit operands, then the product and the
 * accumulated result. */
5408 static always_inline void gen_405_mulladd_insn (DisasContext *ctx,
5410 int ra, int rb, int rt, int Rc)
5414 t0 = tcg_temp_local_new();
5415 t1 = tcg_temp_local_new();
/* low nibble of opc3 (masked) picks which 16-bit halves are multiplied
 * and whether they are sign- or zero-extended */
5417 switch (opc3 & 0x0D) {
5419 /* macchw - macchw. - macchwo - macchwo. */
5420 /* macchws - macchws. - macchwso - macchwso. */
5421 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
5422 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
5423 /* mulchw - mulchw. */
/* signed: low half of rA x high half of rB */
5424 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5425 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5426 tcg_gen_ext16s_tl(t1, t1);
5429 /* macchwu - macchwu. - macchwuo - macchwuo. */
5430 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
5431 /* mulchwu - mulchwu. */
/* unsigned: low half of rA x high half of rB */
5432 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5433 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5434 tcg_gen_ext16u_tl(t1, t1);
5437 /* machhw - machhw. - machhwo - machhwo. */
5438 /* machhws - machhws. - machhwso - machhwso. */
5439 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
5440 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
5441 /* mulhhw - mulhhw. */
/* signed: high halves of both operands */
5442 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5443 tcg_gen_ext16s_tl(t0, t0);
5444 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5445 tcg_gen_ext16s_tl(t1, t1);
5448 /* machhwu - machhwu. - machhwuo - machhwuo. */
5449 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
5450 /* mulhhwu - mulhhwu. */
/* unsigned: high halves of both operands */
5451 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5452 tcg_gen_ext16u_tl(t0, t0);
5453 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5454 tcg_gen_ext16u_tl(t1, t1);
5457 /* maclhw - maclhw. - maclhwo - maclhwo. */
5458 /* maclhws - maclhws. - maclhwso - maclhwso. */
5459 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
5460 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
5461 /* mullhw - mullhw. */
/* signed: low halves of both operands */
5462 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5463 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5466 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
5467 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
5468 /* mullhwu - mullhwu. */
/* unsigned: low halves of both operands */
5469 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5470 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5474 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5475 tcg_gen_mul_tl(t1, t0, t1);
5477 /* nmultiply-and-accumulate (0x0E) */
5478 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5480 /* multiply-and-accumulate (0x0C) */
5481 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5485 /* Check overflow and/or saturate */
5486 int l1 = gen_new_label();
5489 /* Start with XER OV disabled, the most likely case */
5490 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
/* signed overflow: operands with equal signs, result with different sign */
5494 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5495 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5496 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5497 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
/* signed saturation: INT32_MAX or INT32_MIN depending on the sign */
5500 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5501 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
/* unsigned overflow: accumulated result wrapped below the addend */
5505 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
/* unsigned saturation */
5508 tcg_gen_movi_tl(t0, UINT32_MAX);
5512 /* Check overflow */
5513 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
5516 tcg_gen_mov_tl(cpu_gpr[rt], t0);
/* plain multiply variants (no accumulate) write the product directly */
5519 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
/* odd-looking but equivalent to unlikely(Rc != 0): unlikely() already
 * normalizes its argument with !! */
5523 if (unlikely(Rc) != 0) {
5525 gen_set_Rc0(ctx, cpu_gpr[rt]);
/* Expand one 405 MAC opcode into a GEN_HANDLER that forwards its opc2/opc3
 * encoding and register fields to the common gen_405_mulladd_insn above. */
5529 #define GEN_MAC_HANDLER(name, opc2, opc3) \
5530 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) \
5532 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
5533 rD(ctx->opcode), Rc(ctx->opcode)); \
/* The full 405 MAC opcode table: opc2 selects accumulate (0x0C),
 * negate-accumulate (0x0E) or plain multiply (0x08); opc3 encodes the
 * half-word selection, signedness, saturation and OV update. */
5536 /* macchw - macchw. */
5537 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5538 /* macchwo - macchwo. */
5539 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5540 /* macchws - macchws. */
5541 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5542 /* macchwso - macchwso. */
5543 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5544 /* macchwsu - macchwsu. */
5545 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5546 /* macchwsuo - macchwsuo. */
5547 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5548 /* macchwu - macchwu. */
5549 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5550 /* macchwuo - macchwuo. */
5551 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5552 /* machhw - machhw. */
5553 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5554 /* machhwo - machhwo. */
5555 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5556 /* machhws - machhws. */
5557 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5558 /* machhwso - machhwso. */
5559 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5560 /* machhwsu - machhwsu. */
5561 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5562 /* machhwsuo - machhwsuo. */
5563 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5564 /* machhwu - machhwu. */
5565 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5566 /* machhwuo - machhwuo. */
5567 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5568 /* maclhw - maclhw. */
5569 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5570 /* maclhwo - maclhwo. */
5571 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5572 /* maclhws - maclhws. */
5573 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5574 /* maclhwso - maclhwso. */
5575 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5576 /* maclhwu - maclhwu. */
5577 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5578 /* maclhwuo - maclhwuo. */
5579 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5580 /* maclhwsu - maclhwsu. */
5581 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5582 /* maclhwsuo - maclhwsuo. */
5583 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5584 /* nmacchw - nmacchw. */
5585 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5586 /* nmacchwo - nmacchwo. */
5587 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5588 /* nmacchws - nmacchws. */
5589 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5590 /* nmacchwso - nmacchwso. */
5591 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5592 /* nmachhw - nmachhw. */
5593 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5594 /* nmachhwo - nmachhwo. */
5595 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5596 /* nmachhws - nmachhws. */
5597 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5598 /* nmachhwso - nmachhwso. */
5599 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5600 /* nmaclhw - nmaclhw. */
5601 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5602 /* nmaclhwo - nmaclhwo. */
5603 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5604 /* nmaclhws - nmaclhws. */
5605 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5606 /* nmaclhwso - nmaclhwso. */
5607 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5609 /* mulchw - mulchw. */
5610 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5611 /* mulchwu - mulchwu. */
5612 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5613 /* mulhhw - mulhhw. */
5614 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5615 /* mulhhwu - mulhhwu. */
5616 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5617 /* mullhw - mullhw. */
5618 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5619 /* mullhwu - mullhwu. */
5620 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
/* mfdcr: move from device control register (supervisor only); the DCR
 * number comes from the SPR field and is passed via the legacy cpu_T
 * scratch registers */
5623 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR)
5625 #if defined(CONFIG_USER_ONLY)
5626 GEN_EXCP_PRIVREG(ctx);
5628 uint32_t dcrn = SPR(ctx->opcode);
5630 if (unlikely(!ctx->supervisor)) {
5631 GEN_EXCP_PRIVREG(ctx);
5634 tcg_gen_movi_tl(cpu_T[0], dcrn);
5636 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
/* mtdcr: move to device control register (supervisor only) */
5641 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR)
5643 #if defined(CONFIG_USER_ONLY)
5644 GEN_EXCP_PRIVREG(ctx);
5646 uint32_t dcrn = SPR(ctx->opcode);
5648 if (unlikely(!ctx->supervisor)) {
5649 GEN_EXCP_PRIVREG(ctx);
5652 tcg_gen_movi_tl(cpu_T[0], dcrn);
5653 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5659 /* XXX: not implemented on 440 ? */
/* mfdcrx: indexed variant — DCR number taken from rA at run time */
5660 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX)
5662 #if defined(CONFIG_USER_ONLY)
5663 GEN_EXCP_PRIVREG(ctx);
5665 if (unlikely(!ctx->supervisor)) {
5666 GEN_EXCP_PRIVREG(ctx);
5669 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5671 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5672 /* Note: Rc update flag set leads to undefined state of Rc0 */
5677 /* XXX: not implemented on 440 ? */
/* mtdcrx: indexed store to a DCR */
5678 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX)
5680 #if defined(CONFIG_USER_ONLY)
5681 GEN_EXCP_PRIVREG(ctx);
5683 if (unlikely(!ctx->supervisor)) {
5684 GEN_EXCP_PRIVREG(ctx);
5687 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5688 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5690 /* Note: Rc update flag set leads to undefined state of Rc0 */
5694 /* mfdcrux (PPC 460) : user-mode access to DCR */
5695 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX)
5697 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5699 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5700 /* Note: Rc update flag set leads to undefined state of Rc0 */
5703 /* mtdcrux (PPC 460) : user-mode access to DCR */
5704 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX)
5706 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5707 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5709 /* Note: Rc update flag set leads to undefined state of Rc0 */
/* dccci: 4xx data cache congruence-class invalidate — privileged no-op
 * (no cache model) */
5713 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON)
5715 #if defined(CONFIG_USER_ONLY)
5716 GEN_EXCP_PRIVOPC(ctx);
5718 if (unlikely(!ctx->supervisor)) {
5719 GEN_EXCP_PRIVOPC(ctx);
5722 /* interpreted as no-op */
/* dcread: 4xx data-cache read — modelled as a plain 32-bit load; rD
 * receives the effective address afterwards */
5727 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON)
5729 #if defined(CONFIG_USER_ONLY)
5730 GEN_EXCP_PRIVOPC(ctx);
5733 if (unlikely(!ctx->supervisor)) {
5734 GEN_EXCP_PRIVOPC(ctx);
5737 EA = tcg_temp_new();
5738 gen_set_access_type(ACCESS_CACHE);
5739 gen_addr_reg_index(EA, ctx);
5740 val = tcg_temp_new();
5741 gen_qemu_ld32u(val, EA, ctx->mem_idx);
5743 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
/* icbt (40x): instruction cache block touch — unprivileged hint, no-op */
5749 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT)
5751 /* interpreted as no-op */
5752 /* XXX: specification say this is treated as a load by the MMU
5753 * but does not generate any exception
/* iccci: instruction cache invalidate — privileged no-op */
5758 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON)
5760 #if defined(CONFIG_USER_ONLY)
5761 GEN_EXCP_PRIVOPC(ctx);
5763 if (unlikely(!ctx->supervisor)) {
5764 GEN_EXCP_PRIVOPC(ctx);
5767 /* interpreted as no-op */
/* icread: instruction cache read — privileged no-op */
5772 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON)
5774 #if defined(CONFIG_USER_ONLY)
5775 GEN_EXCP_PRIVOPC(ctx);
5777 if (unlikely(!ctx->supervisor)) {
5778 GEN_EXCP_PRIVOPC(ctx);
5781 /* interpreted as no-op */
5785 /* rfci (supervisor only) */
/* Return from critical interrupt, 40x flavor: state restore is done by
 * a helper since it touches MSR/PC atomically */
5786 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP)
5788 #if defined(CONFIG_USER_ONLY)
5789 GEN_EXCP_PRIVOPC(ctx);
5791 if (unlikely(!ctx->supervisor)) {
5792 GEN_EXCP_PRIVOPC(ctx);
5795 /* Restore CPU state */
5796 gen_helper_40x_rfci();
/* rfci, BookE flavor */
5801 GEN_HANDLER(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE)
5803 #if defined(CONFIG_USER_ONLY)
5804 GEN_EXCP_PRIVOPC(ctx);
5806 if (unlikely(!ctx->supervisor)) {
5807 GEN_EXCP_PRIVOPC(ctx);
5810 /* Restore CPU state */
5816 /* BookE specific */
5817 /* XXX: not implemented on 440 ? */
/* rfdi: return from debug interrupt */
5818 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI)
5820 #if defined(CONFIG_USER_ONLY)
5821 GEN_EXCP_PRIVOPC(ctx);
5823 if (unlikely(!ctx->supervisor)) {
5824 GEN_EXCP_PRIVOPC(ctx);
5827 /* Restore CPU state */
5833 /* XXX: not implemented on 440 ? */
/* rfmci: return from machine-check interrupt */
5834 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI)
5836 #if defined(CONFIG_USER_ONLY)
5837 GEN_EXCP_PRIVOPC(ctx);
5839 if (unlikely(!ctx->supervisor)) {
5840 GEN_EXCP_PRIVOPC(ctx);
5843 /* Restore CPU state */
5849 /* TLB management - PowerPC 405 implementation */
/* tlbre: read a TLB entry; rB selects which word (0 = hi, 1 = lo) via
 * legacy micro-ops operating on cpu_T[0] */
5851 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB)
5853 #if defined(CONFIG_USER_ONLY)
5854 GEN_EXCP_PRIVOPC(ctx);
5856 if (unlikely(!ctx->supervisor)) {
5857 GEN_EXCP_PRIVOPC(ctx);
5860 switch (rB(ctx->opcode)) {
5862 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5863 gen_op_4xx_tlbre_hi();
5864 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5867 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5868 gen_op_4xx_tlbre_lo();
5869 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
/* any other rB value is an invalid encoding */
5872 GEN_EXCP_INVAL(ctx);
5878 /* tlbsx - tlbsx. */
/* tlbsx: search the TLB for the EA; with Rc set, CR0 reflects the hit */
5879 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB)
5881 #if defined(CONFIG_USER_ONLY)
5882 GEN_EXCP_PRIVOPC(ctx);
5884 if (unlikely(!ctx->supervisor)) {
5885 GEN_EXCP_PRIVOPC(ctx);
5888 gen_addr_reg_index(cpu_T[0], ctx);
5890 if (Rc(ctx->opcode))
5891 gen_op_4xx_tlbsx_check();
5892 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
/* tlbwe: write a TLB entry word (hi/lo selected by rB) */
5897 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB)
5899 #if defined(CONFIG_USER_ONLY)
5900 GEN_EXCP_PRIVOPC(ctx);
5902 if (unlikely(!ctx->supervisor)) {
5903 GEN_EXCP_PRIVOPC(ctx);
5906 switch (rB(ctx->opcode)) {
5908 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5909 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5910 gen_op_4xx_tlbwe_hi();
5913 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5914 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5915 gen_op_4xx_tlbwe_lo();
5918 GEN_EXCP_INVAL(ctx);
5924 /* TLB management - PowerPC 440 implementation */
/* tlbre: the 440 micro-op takes the word index (rB) directly */
5926 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE)
5928 #if defined(CONFIG_USER_ONLY)
5929 GEN_EXCP_PRIVOPC(ctx);
5931 if (unlikely(!ctx->supervisor)) {
5932 GEN_EXCP_PRIVOPC(ctx);
5935 switch (rB(ctx->opcode)) {
5939 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5940 gen_op_440_tlbre(rB(ctx->opcode));
5941 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5944 GEN_EXCP_INVAL(ctx);
5950 /* tlbsx - tlbsx. */
/* tlbsx: TLB search by EA, sharing the 4xx search micro-ops */
5951 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE)
5953 #if defined(CONFIG_USER_ONLY)
5954 GEN_EXCP_PRIVOPC(ctx);
5956 if (unlikely(!ctx->supervisor)) {
5957 GEN_EXCP_PRIVOPC(ctx);
5960 gen_addr_reg_index(cpu_T[0], ctx);
5962 if (Rc(ctx->opcode))
5963 gen_op_4xx_tlbsx_check();
5964 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
/* tlbwe: write a 440 TLB entry word (index from rB) */
5969 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE)
5971 #if defined(CONFIG_USER_ONLY)
5972 GEN_EXCP_PRIVOPC(ctx);
5974 if (unlikely(!ctx->supervisor)) {
5975 GEN_EXCP_PRIVOPC(ctx);
5978 switch (rB(ctx->opcode)) {
5982 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5983 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5984 gen_op_440_tlbwe(rB(ctx->opcode));
5987 GEN_EXCP_INVAL(ctx);
/* wrtee: write MSR[EE] from bit EE of rD (supervisor only).  The EE bit
 * is isolated from the source, MSR[EE] is cleared, then or-ed back in. */
5994 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE)
5996 #if defined(CONFIG_USER_ONLY)
5997 GEN_EXCP_PRIVOPC(ctx);
6000 if (unlikely(!ctx->supervisor)) {
6001 GEN_EXCP_PRIVOPC(ctx);
6004 t0 = tcg_temp_new();
6005 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
6006 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6007 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
6009 /* Stop translation to have a chance to raise an exception
6010 * if we just set msr_ee to 1
6017 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000EFC01, PPC_WRTEE)
6019 #if defined(CONFIG_USER_ONLY)
6020 GEN_EXCP_PRIVOPC(ctx);
6022 if (unlikely(!ctx->supervisor)) {
6023 GEN_EXCP_PRIVOPC(ctx);
6026 if (ctx->opcode & 0x00010000) {
6027 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6028 /* Stop translation to have a chance to raise an exception */
6031 tcg_gen_andi_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6036 /* PowerPC 440 specific instructions */
/* dlmzb: determine leftmost zero byte — fully delegated to a helper;
 * the Rc flag is passed as an immediate so the helper can update CR0 */
6038 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC)
6040 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6041 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
6042 cpu_gpr[rB(ctx->opcode)], t0);
6043 tcg_temp_free_i32(t0);
6046 /* mbar replaces eieio on 440 */
/* no-op: TCG executes translation blocks in order, so no barrier needed */
6047 GEN_HANDLER(mbar, 0x1F, 0x16, 0x13, 0x001FF801, PPC_BOOKE)
6049 /* interpreted as no-op */
6052 /* msync replaces sync on 440 */
6053 GEN_HANDLER(msync, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE)
6055 /* interpreted as no-op */
/* icbt (440 encoding): cache-touch hint, no-op */
6059 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, PPC_BOOKE)
6061 /* interpreted as no-op */
6062 /* XXX: specification say this is treated as a load by the MMU
6063 * but does not generate any exception
6067 /*** Altivec vector extension ***/
6068 /* Altivec registers moves */
/* Generate an Altivec 128-bit load: the EA is aligned down to 16 bytes
 * and the two 64-bit halves are loaded in an order chosen by the
 * low bit of mem_idx (endianness of the two halves in guest memory). */
6070 #define GEN_VR_LDX(name, opc2, opc3) \
6071 GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \
6074 if (unlikely(!ctx->altivec_enabled)) { \
6075 GEN_EXCP_NO_VR(ctx); \
6078 EA = tcg_temp_new(); \
6079 gen_addr_reg_index(EA, ctx); \
6080 tcg_gen_andi_tl(EA, EA, ~0xf); \
6081 if (ctx->mem_idx & 1) { \
6082 gen_qemu_ld64(cpu_avrl[rD(ctx->opcode)], EA, ctx->mem_idx); \
6083 tcg_gen_addi_tl(EA, EA, 8); \
6084 gen_qemu_ld64(cpu_avrh[rD(ctx->opcode)], EA, ctx->mem_idx); \
6086 gen_qemu_ld64(cpu_avrh[rD(ctx->opcode)], EA, ctx->mem_idx); \
6087 tcg_gen_addi_tl(EA, EA, 8); \
6088 gen_qemu_ld64(cpu_avrl[rD(ctx->opcode)], EA, ctx->mem_idx); \
6090 tcg_temp_free(EA); \
/* Matching Altivec 128-bit store generator (note the st##name handler
 * name; rD and rS decode the same instruction field) */
6093 #define GEN_VR_STX(name, opc2, opc3) \
6094 GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \
6097 if (unlikely(!ctx->altivec_enabled)) { \
6098 GEN_EXCP_NO_VR(ctx); \
6101 EA = tcg_temp_new(); \
6102 gen_addr_reg_index(EA, ctx); \
6103 tcg_gen_andi_tl(EA, EA, ~0xf); \
6104 if (ctx->mem_idx & 1) { \
6105 gen_qemu_st64(cpu_avrl[rD(ctx->opcode)], EA, ctx->mem_idx); \
6106 tcg_gen_addi_tl(EA, EA, 8); \
6107 gen_qemu_st64(cpu_avrh[rD(ctx->opcode)], EA, ctx->mem_idx); \
6109 gen_qemu_st64(cpu_avrh[rD(ctx->opcode)], EA, ctx->mem_idx); \
6110 tcg_gen_addi_tl(EA, EA, 8); \
6111 gen_qemu_st64(cpu_avrl[rD(ctx->opcode)], EA, ctx->mem_idx); \
6113 tcg_temp_free(EA); \
6116 GEN_VR_LDX(lvx, 0x07, 0x03);
6117 /* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
6118 GEN_VR_LDX(lvxl, 0x07, 0x0B);
6120 GEN_VR_STX(svx, 0x07, 0x07);
6121 /* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
6122 GEN_VR_STX(svxl, 0x07, 0x0F);
6124 /*** SPE extension ***/
6125 /* Register moves */
/* Read a GPR as a 64-bit value: on 64-bit targets the GPR already holds
 * it; on 32-bit targets the SPE high half lives in cpu_gprh and is
 * concatenated above the low word. */
6127 static always_inline void gen_load_gpr64(TCGv_i64 t, int reg) {
6128 #if defined(TARGET_PPC64)
6129 tcg_gen_mov_i64(t, cpu_gpr[reg]);
6131 tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
6135 static always_inline void gen_store_gpr64(int reg, TCGv_i64 t) {
6136 #if defined(TARGET_PPC64)
6137 tcg_gen_mov_i64(cpu_gpr[reg], t);
/* 32-bit target: split the 64-bit value across cpu_gpr (low) and
 * cpu_gprh (high) */
6139 TCGv_i64 tmp = tcg_temp_new_i64();
6140 tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
6141 tcg_gen_shri_i64(tmp, t, 32);
6142 tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
6143 tcg_temp_free_i64(tmp);
/* Dispatch one SPE opcode pair: the Rc bit selects between the two
 * sub-handlers sharing an opc2/opc3 slot. */
6147 #define GEN_SPE(name0, name1, opc2, opc3, inval, type) \
6148 GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type) \
6150 if (Rc(ctx->opcode)) \
6156 /* Handler for undefined SPE opcodes */
6157 static always_inline void gen_speundef (DisasContext *ctx)
6159 GEN_EXCP_INVAL(ctx);
/* SPE vector logic ops: on 64-bit targets one target_long op covers both
 * halves; on 32-bit targets the op is applied to gpr and gprh separately. */
6163 #if defined(TARGET_PPC64)
6164 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
6165 static always_inline void gen_##name (DisasContext *ctx) \
6167 if (unlikely(!ctx->spe_enabled)) { \
6168 GEN_EXCP_NO_AP(ctx); \
6171 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6172 cpu_gpr[rB(ctx->opcode)]); \
6175 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
6176 static always_inline void gen_##name (DisasContext *ctx) \
6178 if (unlikely(!ctx->spe_enabled)) { \
6179 GEN_EXCP_NO_AP(ctx); \
6182 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6183 cpu_gpr[rB(ctx->opcode)]); \
6184 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6185 cpu_gprh[rB(ctx->opcode)]); \
6189 GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
6190 GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
6191 GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
6192 GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
6193 GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
6194 GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
6195 GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
6196 GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);
6198 /* SPE logic immediate */
/* Immediate-shift/rotate SPE ops.  The 64-bit variant splits the GPR
 * into two i32 halves, applies the i32 op to each with the immediate
 * from rB, then re-concatenates; the 32-bit variant works on the
 * gpr/gprh pair directly. */
6199 #if defined(TARGET_PPC64)
6200 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6201 static always_inline void gen_##name (DisasContext *ctx) \
6203 if (unlikely(!ctx->spe_enabled)) { \
6204 GEN_EXCP_NO_AP(ctx); \
6207 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
6208 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
6209 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
6210 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
6211 tcg_opi(t0, t0, rB(ctx->opcode)); \
6212 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
6213 tcg_gen_trunc_i64_i32(t1, t2); \
6214 tcg_temp_free_i64(t2); \
6215 tcg_opi(t1, t1, rB(ctx->opcode)); \
6216 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
6217 tcg_temp_free_i32(t0); \
6218 tcg_temp_free_i32(t1); \
6221 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
6222 static always_inline void gen_##name (DisasContext *ctx) \
6224 if (unlikely(!ctx->spe_enabled)) { \
6225 GEN_EXCP_NO_AP(ctx); \
6228 tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
6230 tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
6234 GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
6235 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
6236 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
6237 GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
/* SPE arithmetic */
/* One-operand element-wise arithmetic: apply tcg_op (i32 -> i32) to each
 * 32-bit element of rA, result in rD. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH1(name, tcg_op) \
static always_inline void gen_##name (DisasContext *ctx) \
    if (unlikely(!ctx->spe_enabled)) { \
        GEN_EXCP_NO_AP(ctx); \
    TCGv_i32 t0 = tcg_temp_local_new_i32(); \
    TCGv_i32 t1 = tcg_temp_local_new_i32(); \
    TCGv_i64 t2 = tcg_temp_local_new_i64(); \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
    tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
    tcg_gen_trunc_i64_i32(t1, t2); \
    tcg_temp_free_i64(t2); \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
    tcg_temp_free_i32(t0); \
    tcg_temp_free_i32(t1); \
#define GEN_SPEOP_ARITH1(name, tcg_op) \
static always_inline void gen_##name (DisasContext *ctx) \
    if (unlikely(!ctx->spe_enabled)) { \
        GEN_EXCP_NO_AP(ctx); \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); \

/* evabs element op: ret = |arg1| via a conditional branch around neg. */
static always_inline void gen_op_evabs (TCGv_i32 ret, TCGv_i32 arg1)
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
    tcg_gen_neg_i32(ret, arg1);
    tcg_gen_mov_i32(ret, arg1);
GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
/* evrndw element op: add the rounding bias 0x8000.
 * NOTE(review): ext16u keeps the LOW 16 bits, while evrndw is specified to
 * round to a multiple of 2^16 (mask 0xFFFF0000); a shift/mask line may
 * have been lost from this excerpt -- verify against upstream. */
static always_inline void gen_op_evrndw (TCGv_i32 ret, TCGv_i32 arg1)
    tcg_gen_addi_i32(ret, arg1, 0x8000);
    tcg_gen_ext16u_i32(ret, ret);
GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
/* Two-operand element-wise arithmetic: apply tcg_op (i32 x i32 -> i32) to
 * each element pair of rA/rB into rD.
 * NOTE(review): the 64-bit variant allocates t3 with the legacy
 * tcg_temp_local_new(TCG_TYPE_I64) spelling while every other block here
 * uses tcg_temp_local_new_i64() -- worth unifying. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH2(name, tcg_op) \
static always_inline void gen_##name (DisasContext *ctx) \
    if (unlikely(!ctx->spe_enabled)) { \
        GEN_EXCP_NO_AP(ctx); \
    TCGv_i32 t0 = tcg_temp_local_new_i32(); \
    TCGv_i32 t1 = tcg_temp_local_new_i32(); \
    TCGv_i32 t2 = tcg_temp_local_new_i32(); \
    TCGv_i64 t3 = tcg_temp_local_new(TCG_TYPE_I64); \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
    tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]); \
    tcg_op(t0, t0, t2); \
    tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32); \
    tcg_gen_trunc_i64_i32(t1, t3); \
    tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32); \
    tcg_gen_trunc_i64_i32(t2, t3); \
    tcg_temp_free_i64(t3); \
    tcg_op(t1, t1, t2); \
    tcg_temp_free_i32(t2); \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
    tcg_temp_free_i32(t0); \
    tcg_temp_free_i32(t1); \
#define GEN_SPEOP_ARITH2(name, tcg_op) \
static always_inline void gen_##name (DisasContext *ctx) \
    if (unlikely(!ctx->spe_enabled)) { \
        GEN_EXCP_NO_AP(ctx); \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
           cpu_gpr[rB(ctx->opcode)]); \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
           cpu_gprh[rB(ctx->opcode)]); \
/* Element helpers for SPE shifts/rotates.  SPE uses the low 6 bits of the
 * shift operand; amounts >= 32 take the branch to the "result is 0" path.
 * Rotates use only the low 5 bits. */
static always_inline void gen_op_evsrwu (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
    l1 = gen_new_label();
    l2 = gen_new_label();
    t0 = tcg_temp_local_new_i32();
    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_shr_i32(ret, arg1, t0);
    tcg_gen_movi_i32(ret, 0);
    tcg_temp_free_i32(t0);
GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
/* Arithmetic (sign-propagating) right-shift element helper. */
static always_inline void gen_op_evsrws (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
    l1 = gen_new_label();
    l2 = gen_new_label();
    t0 = tcg_temp_local_new_i32();
    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_sar_i32(ret, arg1, t0);
    tcg_gen_movi_i32(ret, 0);
    tcg_temp_free_i32(t0);
GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
/* Left-shift element helper. */
static always_inline void gen_op_evslw (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
    l1 = gen_new_label();
    l2 = gen_new_label();
    t0 = tcg_temp_local_new_i32();
    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_shl_i32(ret, arg1, t0);
    tcg_gen_movi_i32(ret, 0);
    tcg_temp_free_i32(t0);
GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
/* Rotate-left element helper: only the low 5 bits of arg2 are used, so no
 * zero-result branch is needed. */
static always_inline void gen_op_evrlw (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, arg2, 0x1F);
    tcg_gen_rotl_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
6405 static always_inline void gen_evmergehi (DisasContext *ctx)
6407 if (unlikely(!ctx->spe_enabled)) {
6408 GEN_EXCP_NO_AP(ctx);
6411 #if defined(TARGET_PPC64)
6412 TCGv t0 = tcg_temp_new();
6413 TCGv t1 = tcg_temp_new();
6414 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
6415 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF0000000ULL);
6416 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6420 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
6421 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
/* evsubfw element op: subtract-from semantics, i.e. ret = arg2 - arg1. */
static always_inline void gen_op_evsubf (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
    tcg_gen_sub_i32(ret, arg2, arg1);
GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);
/* SPE arithmetic immediate */
/* Element-wise op with a 5-bit immediate: apply tcg_op to each 32-bit
 * element of rB, using the rA field as the immediate, result in rD. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
static always_inline void gen_##name (DisasContext *ctx) \
    if (unlikely(!ctx->spe_enabled)) { \
        GEN_EXCP_NO_AP(ctx); \
    TCGv_i32 t0 = tcg_temp_local_new_i32(); \
    TCGv_i32 t1 = tcg_temp_local_new_i32(); \
    TCGv_i64 t2 = tcg_temp_local_new_i64(); \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
    tcg_op(t0, t0, rA(ctx->opcode)); \
    tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
    tcg_gen_trunc_i64_i32(t1, t2); \
    tcg_temp_free_i64(t2); \
    tcg_op(t1, t1, rA(ctx->opcode)); \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
    tcg_temp_free_i32(t0); \
    tcg_temp_free_i32(t1); \
#define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
static always_inline void gen_##name (DisasContext *ctx) \
    if (unlikely(!ctx->spe_enabled)) { \
        GEN_EXCP_NO_AP(ctx); \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)], \

GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);
/* SPE comparison */
/* Element-wise compare of rA/rB into CR field crfD: CL reflects the low
 * element compare, CH the high element, plus CH|CL and CH&CL summary bits.
 * NOTE(review): the 32-bit variant initializes the CR field with
 * tcg_gen_movi_tl although cpu_crf[] is TCGv_i32 -- the _i32 form used a
 * few lines below looks like the intended spelling; verify. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_COMP(name, tcg_cond) \
static always_inline void gen_##name (DisasContext *ctx) \
    if (unlikely(!ctx->spe_enabled)) { \
        GEN_EXCP_NO_AP(ctx); \
    int l1 = gen_new_label(); \
    int l2 = gen_new_label(); \
    int l3 = gen_new_label(); \
    int l4 = gen_new_label(); \
    TCGv_i32 t0 = tcg_temp_local_new_i32(); \
    TCGv_i32 t1 = tcg_temp_local_new_i32(); \
    TCGv_i64 t2 = tcg_temp_local_new_i64(); \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
    tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
    tcg_gen_brcond_i32(tcg_cond, t0, t1, l1); \
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \
    gen_set_label(l1); \
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
                     CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
    gen_set_label(l2); \
    tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
    tcg_gen_trunc_i64_i32(t0, t2); \
    tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
    tcg_gen_trunc_i64_i32(t1, t2); \
    tcg_temp_free_i64(t2); \
    tcg_gen_brcond_i32(tcg_cond, t0, t1, l3); \
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
                     ~(CRF_CH | CRF_CH_AND_CL)); \
    gen_set_label(l3); \
    tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
                    CRF_CH | CRF_CH_OR_CL); \
    gen_set_label(l4); \
    tcg_temp_free_i32(t0); \
    tcg_temp_free_i32(t1); \
#define GEN_SPEOP_COMP(name, tcg_cond) \
static always_inline void gen_##name (DisasContext *ctx) \
    if (unlikely(!ctx->spe_enabled)) { \
        GEN_EXCP_NO_AP(ctx); \
    int l1 = gen_new_label(); \
    int l2 = gen_new_label(); \
    int l3 = gen_new_label(); \
    int l4 = gen_new_label(); \
    tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)], \
                       cpu_gpr[rB(ctx->opcode)], l1); \
    tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0); \
    gen_set_label(l1); \
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
                     CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
    gen_set_label(l2); \
    tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)], \
                       cpu_gprh[rB(ctx->opcode)], l3); \
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
                     ~(CRF_CH | CRF_CH_AND_CL)); \
    gen_set_label(l3); \
    tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
                    CRF_CH | CRF_CH_OR_CL); \
    gen_set_label(l4); \

GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
/* brinc: bit-reversed increment (for FFT addressing), done in a helper. */
static always_inline void gen_brinc (DisasContext *ctx)
    /* Note: brinc is usable even if SPE is disabled */
    gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
/* evmergelo: merge the low 32-bit elements -- rD.hi = rA.lo, rD.lo = rB.lo. */
static always_inline void gen_evmergelo (DisasContext *ctx)
    if (unlikely(!ctx->spe_enabled)) {
        GEN_EXCP_NO_AP(ctx);
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFLL);
    tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6575 static always_inline void gen_evmergehilo (DisasContext *ctx)
6577 if (unlikely(!ctx->spe_enabled)) {
6578 GEN_EXCP_NO_AP(ctx);
6581 #if defined(TARGET_PPC64)
6582 TCGv t0 = tcg_temp_new();
6583 TCGv t1 = tcg_temp_new();
6584 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFLL);
6585 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF0000000ULL);
6586 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6590 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6591 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
/* evmergelohi: rD.hi = rA.lo, rD.lo = rB.hi. */
static always_inline void gen_evmergelohi (DisasContext *ctx)
    if (unlikely(!ctx->spe_enabled)) {
        GEN_EXCP_NO_AP(ctx);
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
    tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6613 static always_inline void gen_evsplati (DisasContext *ctx)
6615 uint64_t imm = ((int32_t)(rA(ctx->opcode) << 11)) >> 27;
6617 #if defined(TARGET_PPC64)
6618 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
6620 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
6621 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
6624 static always_inline void gen_evsplatfi (DisasContext *ctx)
6626 uint64_t imm = rA(ctx->opcode) << 11;
6628 #if defined(TARGET_PPC64)
6629 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
6631 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
6632 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
/* evsel: for each 32-bit element, select the element from rA if the
 * corresponding bit of the crfS field (CH for the high element, CL for
 * the low) is set, otherwise from rB. */
static always_inline void gen_evsel (DisasContext *ctx)
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    int l3 = gen_new_label();
    int l4 = gen_new_label();
    TCGv_i32 t0 = tcg_temp_local_new_i32();
#if defined(TARGET_PPC64)
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    /* High element: test bit 3 (CH) of the CR field named by the low three
     * opcode bits. */
    tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
    /* Low element: test bit 2 (CL). */
    tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t2, cpu_gpr[rA(ctx->opcode)], 0x00000000FFFFFFFFULL);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFULL);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free_i32(t0);
#if defined(TARGET_PPC64)
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
/* The four evsel opcode variants (crfS encoded in the low opcode bits)
 * all dispatch to gen_evsel. */
GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE)
GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE)
GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE)
GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE)
/* Opcode table for the SPE integer/logical group (opc3 = 0x08).  Each
 * GEN_SPE entry registers the Rc=0/Rc=1 handler pair declared above. */
GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE); ////
GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE); ////
GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE); ////
GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE); //
GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE); //
GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); ////
GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); ////
GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); ////
/* SPE load and stores */
/* Compute EA = (rA|0) + (uimm << sh): the 5-bit offset in the rB field is
 * scaled by the access size (sh = log2 of the size). */
static always_inline void gen_addr_spe_imm_index (TCGv EA, DisasContext *ctx, int sh)
    target_ulong uimm = rB(ctx->opcode);
    if (rA(ctx->opcode) == 0)
        tcg_gen_movi_tl(EA, uimm << sh);
        tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh);
/* evldd: load one 64-bit doubleword into rD (both elements at once). */
static always_inline void gen_op_evldd(DisasContext *ctx, TCGv addr)
#if defined(TARGET_PPC64)
    gen_qemu_ld64(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx);
    TCGv_i64 t0 = tcg_temp_new_i64();
    gen_qemu_ld64(t0, addr, ctx->mem_idx);
    tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_shri_i64(t0, t0, 32);
    tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
/* evldw: load two words, first into the high element, second into the low
 * element of rD. */
static always_inline void gen_op_evldw(DisasContext *ctx, TCGv addr)
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld32u(t0, addr, ctx->mem_idx);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_addi_tl(addr, addr, 4);
    gen_qemu_ld32u(t0, addr, ctx->mem_idx);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    gen_qemu_ld32u(cpu_gprh[rD(ctx->opcode)], addr, ctx->mem_idx);
    tcg_gen_addi_tl(addr, addr, 4);
    gen_qemu_ld32u(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx);
6769 static always_inline void gen_op_evldh(DisasContext *ctx, TCGv addr)
6771 TCGv t0 = tcg_temp_new();
6772 #if defined(TARGET_PPC64)
6773 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6774 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
6775 tcg_gen_addi_tl(addr, addr, 2);
6776 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6777 tcg_gen_shli_tl(t0, t0, 32);
6778 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6779 tcg_gen_addi_tl(addr, addr, 2);
6780 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6781 tcg_gen_shli_tl(t0, t0, 16);
6782 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6783 tcg_gen_addi_tl(addr, addr, 2);
6784 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6785 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6787 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6788 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
6789 tcg_gen_addi_tl(addr, addr, 2);
6790 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6791 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
6792 tcg_gen_addi_tl(addr, addr, 2);
6793 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6794 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
6795 tcg_gen_addi_tl(addr, addr, 2);
6796 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6797 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
/* evlhhesplat: load one half-word and splat it into the even (upper)
 * half-word of both elements; the odd half-words become 0. */
static always_inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(t0, addr, ctx->mem_idx);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
/* evlhhousplat: load one half-word zero-extended and splat it into both
 * 32-bit elements. */
static always_inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(t0, addr, ctx->mem_idx);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
/* evlhhossplat: load one half-word sign-extended and splat it into both
 * 32-bit elements (ext32u strips the sign bits before the OR on 64-bit). */
static always_inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16s(t0, addr, ctx->mem_idx);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_ext32u_tl(t0, t0);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
/* evlwhe: load two half-words into the even (upper) half-word of each
 * element; the odd half-words become 0.
 * NOTE(review): in the 64-bit path the address does not advance between
 * the two ld16u calls in this excerpt -- an addi of 2 appears to have been
 * dropped; confirm against upstream. */
static always_inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr)
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    gen_qemu_ld16u(t0, addr, ctx->mem_idx);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    gen_qemu_ld16u(t0, addr, ctx->mem_idx);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    gen_qemu_ld16u(t0, addr, ctx->mem_idx);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    tcg_gen_addi_tl(addr, addr, 2);
    gen_qemu_ld16u(t0, addr, ctx->mem_idx);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
/* evlwhou: load two half-words zero-extended, one per 32-bit element. */
static always_inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr)
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx);
    tcg_gen_addi_tl(addr, addr, 2);
    gen_qemu_ld16u(t0, addr, ctx->mem_idx);
    tcg_gen_shli_tl(t0, t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    gen_qemu_ld16u(cpu_gprh[rD(ctx->opcode)], addr, ctx->mem_idx);
    tcg_gen_addi_tl(addr, addr, 2);
    gen_qemu_ld16u(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx);
/* evlwhos: load two half-words sign-extended, one per 32-bit element. */
static always_inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr)
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16s(t0, addr, ctx->mem_idx);
    tcg_gen_ext32u_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_addi_tl(addr, addr, 2);
    gen_qemu_ld16s(t0, addr, ctx->mem_idx);
    tcg_gen_shli_tl(t0, t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    gen_qemu_ld16s(cpu_gprh[rD(ctx->opcode)], addr, ctx->mem_idx);
    tcg_gen_addi_tl(addr, addr, 2);
    gen_qemu_ld16s(cpu_gpr[rD(ctx->opcode)], addr, ctx->mem_idx);
/* evlwwsplat: load one word and splat it into both 32-bit elements of rD. */
static always_inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld32u(t0, addr, ctx->mem_idx);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
6915 static always_inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr)
6917 TCGv t0 = tcg_temp_new();
6918 #if defined(TARGET_PPC64)
6919 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6920 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
6921 tcg_gen_shli_tl(t0, t0, 32);
6922 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6923 tcg_gen_addi_tl(addr, addr, 2);
6924 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6925 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6926 tcg_gen_shli_tl(t0, t0, 16);
6927 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
6929 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6930 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
6931 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
6932 tcg_gen_addi_tl(addr, addr, 2);
6933 gen_qemu_ld16u(t0, addr, ctx->mem_idx);
6934 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
6935 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
/* evstdd: store rS as one 64-bit doubleword. */
static always_inline void gen_op_evstdd(DisasContext *ctx, TCGv addr)
#if defined(TARGET_PPC64)
    gen_qemu_st64(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx);
    TCGv_i64 t0 = tcg_temp_new_i64();
    tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)], cpu_gprh[rS(ctx->opcode)]);
    gen_qemu_st64(t0, addr, ctx->mem_idx);
    tcg_temp_free_i64(t0);
/* evstdw: store the two 32-bit elements as two words, high element first. */
static always_inline void gen_op_evstdw(DisasContext *ctx, TCGv addr)
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st32(t0, addr, ctx->mem_idx);
    gen_qemu_st32(cpu_gprh[rS(ctx->opcode)], addr, ctx->mem_idx);
    tcg_gen_addi_tl(addr, addr, 4);
    gen_qemu_st32(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx);
/* evstdh: store the four 16-bit half-words of rS, most significant first. */
static always_inline void gen_op_evstdh(DisasContext *ctx, TCGv addr)
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
    tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
    gen_qemu_st16(t0, addr, ctx->mem_idx);
    tcg_gen_addi_tl(addr, addr, 2);
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st16(t0, addr, ctx->mem_idx);
    gen_qemu_st16(cpu_gprh[rS(ctx->opcode)], addr, ctx->mem_idx);
    tcg_gen_addi_tl(addr, addr, 2);
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
    gen_qemu_st16(t0, addr, ctx->mem_idx);
    tcg_gen_addi_tl(addr, addr, 2);
    gen_qemu_st16(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx);
/* evstwhe: store the even (upper) half-word of each element. */
static always_inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr)
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
    tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
    gen_qemu_st16(t0, addr, ctx->mem_idx);
    tcg_gen_addi_tl(addr, addr, 2);
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
    gen_qemu_st16(t0, addr, ctx->mem_idx);
/* evstwho: store the odd (lower) half-word of each element. */
static always_inline void gen_op_evstwho(DisasContext *ctx, TCGv addr)
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st16(t0, addr, ctx->mem_idx);
    gen_qemu_st16(cpu_gprh[rS(ctx->opcode)], addr, ctx->mem_idx);
    tcg_gen_addi_tl(addr, addr, 2);
    gen_qemu_st16(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx);
/* evstwwe: store the high (even-word) 32-bit element. */
static always_inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr)
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st32(t0, addr, ctx->mem_idx);
    gen_qemu_st32(cpu_gprh[rS(ctx->opcode)], addr, ctx->mem_idx);
/* evstwwo: store the low (odd-word) 32-bit element. */
static always_inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr)
    gen_qemu_st32(cpu_gpr[rS(ctx->opcode)], addr, ctx->mem_idx);
/* GEN_SPEOP_LDST declares the handler for one SPE load/store: it computes
 * the EA (register+register, or register + scaled 5-bit immediate when Rc
 * is set) and calls the matching gen_op_ helper.  'sh' is log2 of the
 * access size, used to scale the immediate offset. */
#define GEN_SPEOP_LDST(name, opc2, sh) \
GEN_HANDLER(gen_##name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE) \
    if (unlikely(!ctx->spe_enabled)) { \
        GEN_EXCP_NO_AP(ctx); \
    t0 = tcg_temp_new(); \
    if (Rc(ctx->opcode)) { \
        gen_addr_spe_imm_index(t0, ctx, sh); \
        gen_addr_reg_index(t0, ctx); \
    gen_op_##name(ctx, t0); \
    tcg_temp_free(t0); \

/* Loads. */
GEN_SPEOP_LDST(evldd, 0x00, 3);
GEN_SPEOP_LDST(evldw, 0x01, 3);
GEN_SPEOP_LDST(evldh, 0x02, 3);
GEN_SPEOP_LDST(evlhhesplat, 0x04, 1);
GEN_SPEOP_LDST(evlhhousplat, 0x06, 1);
GEN_SPEOP_LDST(evlhhossplat, 0x07, 1);
GEN_SPEOP_LDST(evlwhe, 0x08, 2);
GEN_SPEOP_LDST(evlwhou, 0x0A, 2);
GEN_SPEOP_LDST(evlwhos, 0x0B, 2);
GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2);
GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2);
/* Stores. */
GEN_SPEOP_LDST(evstdd, 0x10, 3);
GEN_SPEOP_LDST(evstdw, 0x11, 3);
GEN_SPEOP_LDST(evstdh, 0x12, 3);
GEN_SPEOP_LDST(evstwhe, 0x18, 2);
GEN_SPEOP_LDST(evstwho, 0x1A, 2);
GEN_SPEOP_LDST(evstwwe, 0x1C, 2);
GEN_SPEOP_LDST(evstwwo, 0x1E, 2);
/* Multiply and add - TODO */
/* Opcode tables for the SPE multiply/accumulate families (opc3 0x10-0x17).
 * Most handlers are still speundef placeholders, matching the TODO above. */
GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, PPC_SPE);
GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, PPC_SPE);
GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, PPC_SPE);
GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, PPC_SPE);
GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, PPC_SPE);
GEN_SPE(evmra, speundef, 0x07, 0x13, 0x0000F800, PPC_SPE);
GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0x00000000, PPC_SPE);
GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0x00000000, PPC_SPE);
GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0x00000000, PPC_SPE);
GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
7150 /*** SPE floating-point extension ***/
/* Helper-generation macros for the SPE embedded floating-point unit.
 * Naming: CONV_x_y generates a conversion from a y-bit source to an
 * x-bit result; ARITH2 a two-source arithmetic op; COMP a comparison
 * that writes a CR field (crfD).  On TARGET_PPC64 a 64-bit SPE value
 * fits in one GPR (the 32-bit forms mask/merge the low half of rD); a
 * second set of definitions follows for 32-bit targets, where the value
 * is split across cpu_gpr (low) and cpu_gprh (high) and moved via
 * gen_load_gpr64/gen_store_gpr64.
 * NOTE(review): the redefinitions below are presumably the !TARGET_PPC64
 * branch of this #if — the #else is not visible in this excerpt, confirm
 * before editing. */
7151 #if defined(TARGET_PPC64)
7152 #define GEN_SPEFPUOP_CONV_32_32(name) /* 32-bit result from 32-bit source */ \
7153 static always_inline void gen_##name (DisasContext *ctx) \
7157 t0 = tcg_temp_new_i32(); \
7158 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7159 gen_helper_##name(t0, t0); \
7160 t1 = tcg_temp_new(); \
7161 tcg_gen_extu_i32_tl(t1, t0); \
7162 tcg_temp_free_i32(t0); \
7163 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7164 0xFFFFFFFF00000000ULL); \
7165 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
7166 tcg_temp_free(t1); \
7168 #define GEN_SPEFPUOP_CONV_32_64(name) /* 32-bit result from 64-bit source */ \
7169 static always_inline void gen_##name (DisasContext *ctx) \
7173 t0 = tcg_temp_new_i32(); \
7174 gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]); \
7175 t1 = tcg_temp_new(); \
7176 tcg_gen_extu_i32_tl(t1, t0); \
7177 tcg_temp_free_i32(t0); \
7178 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7179 0xFFFFFFFF00000000ULL); \
7180 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
7181 tcg_temp_free(t1); \
7183 #define GEN_SPEFPUOP_CONV_64_32(name) /* 64-bit result from 32-bit source */ \
7184 static always_inline void gen_##name (DisasContext *ctx) \
7186 TCGv_i32 t0 = tcg_temp_new_i32(); \
7187 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
7188 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0); \
7189 tcg_temp_free_i32(t0); \
7191 #define GEN_SPEFPUOP_CONV_64_64(name) /* 64-bit result from 64-bit source */ \
7192 static always_inline void gen_##name (DisasContext *ctx) \
7194 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7196 #define GEN_SPEFPUOP_ARITH2_32_32(name) /* rD(lo32) = op(rA(lo32), rB(lo32)) */ \
7197 static always_inline void gen_##name (DisasContext *ctx) \
7201 if (unlikely(!ctx->spe_enabled)) { \
7202 GEN_EXCP_NO_AP(ctx); \
7205 t0 = tcg_temp_new_i32(); \
7206 t1 = tcg_temp_new_i32(); \
7207 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7208 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7209 gen_helper_##name(t0, t0, t1); \
7210 tcg_temp_free_i32(t1); \
7211 t2 = tcg_temp_new(); \
7212 tcg_gen_extu_i32_tl(t2, t0); \
7213 tcg_temp_free_i32(t0); \
7214 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
7215 0xFFFFFFFF00000000ULL); \
7216 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t2); \
7217 tcg_temp_free(t2); \
7219 #define GEN_SPEFPUOP_ARITH2_64_64(name) /* rD = op(rA, rB), full 64-bit */ \
7220 static always_inline void gen_##name (DisasContext *ctx) \
7222 if (unlikely(!ctx->spe_enabled)) { \
7223 GEN_EXCP_NO_AP(ctx); \
7226 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7227 cpu_gpr[rB(ctx->opcode)]); \
7229 #define GEN_SPEFPUOP_COMP_32(name) /* crfD = cmp(rA(lo32), rB(lo32)) */ \
7230 static always_inline void gen_##name (DisasContext *ctx) \
7233 if (unlikely(!ctx->spe_enabled)) { \
7234 GEN_EXCP_NO_AP(ctx); \
7237 t0 = tcg_temp_new_i32(); \
7238 t1 = tcg_temp_new_i32(); \
7239 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7240 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
7241 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1); \
7242 tcg_temp_free_i32(t0); \
7243 tcg_temp_free_i32(t1); \
7245 #define GEN_SPEFPUOP_COMP_64(name) /* crfD = cmp(rA, rB), full 64-bit */ \
7246 static always_inline void gen_##name (DisasContext *ctx) \
7248 if (unlikely(!ctx->spe_enabled)) { \
7249 GEN_EXCP_NO_AP(ctx); \
7252 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \
7253 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7256 #define GEN_SPEFPUOP_CONV_32_32(name) /* 32-bit-target variant */ \
7257 static always_inline void gen_##name (DisasContext *ctx) \
7259 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7261 #define GEN_SPEFPUOP_CONV_32_64(name) /* 32-bit-target: 64-bit src via gpr pair */ \
7262 static always_inline void gen_##name (DisasContext *ctx) \
7264 TCGv_i64 t0 = tcg_temp_new_i64(); \
7265 gen_load_gpr64(t0, rB(ctx->opcode)); \
7266 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], t0); \
7267 tcg_temp_free_i64(t0); \
7269 #define GEN_SPEFPUOP_CONV_64_32(name) /* 32-bit-target: 64-bit result via gpr pair */ \
7270 static always_inline void gen_##name (DisasContext *ctx) \
7272 TCGv_i64 t0 = tcg_temp_new_i64(); \
7273 gen_helper_##name(t0, cpu_gpr[rB(ctx->opcode)]); \
7274 gen_store_gpr64(rD(ctx->opcode), t0); \
7275 tcg_temp_free_i64(t0); \
7277 #define GEN_SPEFPUOP_CONV_64_64(name) /* 32-bit-target: 64-bit in and out */ \
7278 static always_inline void gen_##name (DisasContext *ctx) \
7280 TCGv_i64 t0 = tcg_temp_new_i64(); \
7281 gen_load_gpr64(t0, rB(ctx->opcode)); \
7282 gen_helper_##name(t0, t0); \
7283 gen_store_gpr64(rD(ctx->opcode), t0); \
7284 tcg_temp_free_i64(t0); \
7286 #define GEN_SPEFPUOP_ARITH2_32_32(name) /* 32-bit-target scalar arithmetic */ \
7287 static always_inline void gen_##name (DisasContext *ctx) \
7289 if (unlikely(!ctx->spe_enabled)) { \
7290 GEN_EXCP_NO_AP(ctx); \
7293 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], \
7294 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7296 #define GEN_SPEFPUOP_ARITH2_64_64(name) /* 32-bit-target vector arithmetic */ \
7297 static always_inline void gen_##name (DisasContext *ctx) \
7300 if (unlikely(!ctx->spe_enabled)) { \
7301 GEN_EXCP_NO_AP(ctx); \
7304 t0 = tcg_temp_new_i64(); \
7305 t1 = tcg_temp_new_i64(); \
7306 gen_load_gpr64(t0, rA(ctx->opcode)); \
7307 gen_load_gpr64(t1, rB(ctx->opcode)); \
7308 gen_helper_##name(t0, t0, t1); \
7309 gen_store_gpr64(rD(ctx->opcode), t0); \
7310 tcg_temp_free_i64(t0); \
7311 tcg_temp_free_i64(t1); \
7313 #define GEN_SPEFPUOP_COMP_32(name) /* 32-bit-target scalar compare */ \
7314 static always_inline void gen_##name (DisasContext *ctx) \
7316 if (unlikely(!ctx->spe_enabled)) { \
7317 GEN_EXCP_NO_AP(ctx); \
7320 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \
7321 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
7323 #define GEN_SPEFPUOP_COMP_64(name) /* 32-bit-target vector compare */ \
7324 static always_inline void gen_##name (DisasContext *ctx) \
7327 if (unlikely(!ctx->spe_enabled)) { \
7328 GEN_EXCP_NO_AP(ctx); \
7331 t0 = tcg_temp_new_i64(); \
7332 t1 = tcg_temp_new_i64(); \
7333 gen_load_gpr64(t0, rA(ctx->opcode)); \
7334 gen_load_gpr64(t1, rB(ctx->opcode)); \
7335 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], t0, t1); \
7336 tcg_temp_free_i64(t0); \
7337 tcg_temp_free_i64(t1); \
7341 /* Single precision floating-point vectors operations */
/* Instantiate the TCG generators for the evfs* two-source arithmetic ops. */
7343 GEN_SPEFPUOP_ARITH2_64_64(evfsadd);
7344 GEN_SPEFPUOP_ARITH2_64_64(evfssub);
7345 GEN_SPEFPUOP_ARITH2_64_64(evfsmul);
7346 GEN_SPEFPUOP_ARITH2_64_64(evfsdiv);
7347 static always_inline void gen_evfsabs (DisasContext *ctx)
7349 if (unlikely(!ctx->spe_enabled)) {
7350 GEN_EXCP_NO_AP(ctx);
7353 #if defined(TARGET_PPC64)
7354 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000080000000LL);
7356 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x80000000);
7357 tcg_gen_andi_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
7360 static always_inline void gen_evfsnabs (DisasContext *ctx)
7362 if (unlikely(!ctx->spe_enabled)) {
7363 GEN_EXCP_NO_AP(ctx);
7366 #if defined(TARGET_PPC64)
7367 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
7369 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
7370 tcg_gen_ori_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
7373 static always_inline void gen_evfsneg (DisasContext *ctx)
7375 if (unlikely(!ctx->spe_enabled)) {
7376 GEN_EXCP_NO_AP(ctx);
7379 #if defined(TARGET_PPC64)
7380 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
7382 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
7383 tcg_gen_xori_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
/* evfs* conversions (integer/fractional <-> vector single) and
 * comparisons (the COMP generators write crfD), followed by their
 * opcode-table registrations. */
7388 GEN_SPEFPUOP_CONV_64_64(evfscfui);
7389 GEN_SPEFPUOP_CONV_64_64(evfscfsi);
7390 GEN_SPEFPUOP_CONV_64_64(evfscfuf);
7391 GEN_SPEFPUOP_CONV_64_64(evfscfsf);
7392 GEN_SPEFPUOP_CONV_64_64(evfsctui);
7393 GEN_SPEFPUOP_CONV_64_64(evfsctsi);
7394 GEN_SPEFPUOP_CONV_64_64(evfsctuf);
7395 GEN_SPEFPUOP_CONV_64_64(evfsctsf);
7396 GEN_SPEFPUOP_CONV_64_64(evfsctuiz);
7397 GEN_SPEFPUOP_CONV_64_64(evfsctsiz);
7400 GEN_SPEFPUOP_COMP_64(evfscmpgt);
7401 GEN_SPEFPUOP_COMP_64(evfscmplt);
7402 GEN_SPEFPUOP_COMP_64(evfscmpeq);
7403 GEN_SPEFPUOP_COMP_64(evfststgt);
7404 GEN_SPEFPUOP_COMP_64(evfststlt);
7405 GEN_SPEFPUOP_COMP_64(evfststeq);
7407 /* Opcodes definitions */
7408 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPEFPU); //
7409 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPEFPU); //
7410 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPEFPU); //
7411 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPEFPU); //
7412 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPEFPU); //
7413 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPEFPU); //
7414 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPEFPU); //
7415 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPEFPU); //
7416 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPEFPU); //
7417 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPEFPU); //
7418 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPEFPU); //
7419 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPEFPU); //
7420 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPEFPU); //
7421 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPEFPU); //
7423 /* Single precision floating-point operations */
/* Instantiate the TCG generators for the scalar efs* arithmetic ops. */
7425 GEN_SPEFPUOP_ARITH2_32_32(efsadd);
7426 GEN_SPEFPUOP_ARITH2_32_32(efssub);
7427 GEN_SPEFPUOP_ARITH2_32_32(efsmul);
7428 GEN_SPEFPUOP_ARITH2_32_32(efsdiv);
7429 static always_inline void gen_efsabs (DisasContext *ctx)
7431 if (unlikely(!ctx->spe_enabled)) {
7432 GEN_EXCP_NO_AP(ctx);
7435 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], (target_long)~0x80000000LL);
7437 static always_inline void gen_efsnabs (DisasContext *ctx)
7439 if (unlikely(!ctx->spe_enabled)) {
7440 GEN_EXCP_NO_AP(ctx);
7443 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
7445 static always_inline void gen_efsneg (DisasContext *ctx)
7447 if (unlikely(!ctx->spe_enabled)) {
7448 GEN_EXCP_NO_AP(ctx);
7451 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
/* efs* scalar conversions and comparisons (COMP writes crfD), plus the
 * double->single conversion efscfd, followed by opcode registrations. */
7455 GEN_SPEFPUOP_CONV_32_32(efscfui);
7456 GEN_SPEFPUOP_CONV_32_32(efscfsi);
7457 GEN_SPEFPUOP_CONV_32_32(efscfuf);
7458 GEN_SPEFPUOP_CONV_32_32(efscfsf);
7459 GEN_SPEFPUOP_CONV_32_32(efsctui);
7460 GEN_SPEFPUOP_CONV_32_32(efsctsi);
7461 GEN_SPEFPUOP_CONV_32_32(efsctuf);
7462 GEN_SPEFPUOP_CONV_32_32(efsctsf);
7463 GEN_SPEFPUOP_CONV_32_32(efsctuiz);
7464 GEN_SPEFPUOP_CONV_32_32(efsctsiz);
7465 GEN_SPEFPUOP_CONV_32_64(efscfd);
7468 GEN_SPEFPUOP_COMP_32(efscmpgt);
7469 GEN_SPEFPUOP_COMP_32(efscmplt);
7470 GEN_SPEFPUOP_COMP_32(efscmpeq);
7471 GEN_SPEFPUOP_COMP_32(efststgt);
7472 GEN_SPEFPUOP_COMP_32(efststlt);
7473 GEN_SPEFPUOP_COMP_32(efststeq);
7475 /* Opcodes definitions */
7476 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPEFPU); //
7477 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPEFPU); //
7478 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPEFPU); //
7479 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPEFPU); //
7480 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPEFPU); //
7481 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPEFPU); //
7482 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPEFPU); //
7483 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPEFPU); //
7484 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPEFPU); //
7485 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPEFPU); //
7486 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPEFPU); //
7487 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPEFPU); //
7488 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPEFPU); //
7489 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPEFPU); //
7491 /* Double precision floating-point operations */
/* Instantiate the TCG generators for the scalar efd* arithmetic ops. */
7493 GEN_SPEFPUOP_ARITH2_64_64(efdadd);
7494 GEN_SPEFPUOP_ARITH2_64_64(efdsub);
7495 GEN_SPEFPUOP_ARITH2_64_64(efdmul);
7496 GEN_SPEFPUOP_ARITH2_64_64(efddiv);
7497 static always_inline void gen_efdabs (DisasContext *ctx)
7499 if (unlikely(!ctx->spe_enabled)) {
7500 GEN_EXCP_NO_AP(ctx);
7503 #if defined(TARGET_PPC64)
7504 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000000000000LL);
7506 tcg_gen_andi_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
7509 static always_inline void gen_efdnabs (DisasContext *ctx)
7511 if (unlikely(!ctx->spe_enabled)) {
7512 GEN_EXCP_NO_AP(ctx);
7515 #if defined(TARGET_PPC64)
7516 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
7518 tcg_gen_ori_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
7521 static always_inline void gen_efdneg (DisasContext *ctx)
7523 if (unlikely(!ctx->spe_enabled)) {
7524 GEN_EXCP_NO_AP(ctx);
7527 #if defined(TARGET_PPC64)
7528 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
7530 tcg_gen_xori_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
/* efd* conversions (including single<->double efdcfs and the 64-bit
 * integer forms efdcf*id / efdct*idz), comparisons writing crfD, and the
 * opcode-table registrations. */
7535 GEN_SPEFPUOP_CONV_64_32(efdcfui);
7536 GEN_SPEFPUOP_CONV_64_32(efdcfsi);
7537 GEN_SPEFPUOP_CONV_64_32(efdcfuf);
7538 GEN_SPEFPUOP_CONV_64_32(efdcfsf);
7539 GEN_SPEFPUOP_CONV_32_64(efdctui);
7540 GEN_SPEFPUOP_CONV_32_64(efdctsi);
7541 GEN_SPEFPUOP_CONV_32_64(efdctuf);
7542 GEN_SPEFPUOP_CONV_32_64(efdctsf);
7543 GEN_SPEFPUOP_CONV_32_64(efdctuiz);
7544 GEN_SPEFPUOP_CONV_32_64(efdctsiz);
7545 GEN_SPEFPUOP_CONV_64_32(efdcfs);
7546 GEN_SPEFPUOP_CONV_64_64(efdcfuid);
7547 GEN_SPEFPUOP_CONV_64_64(efdcfsid);
7548 GEN_SPEFPUOP_CONV_64_64(efdctuidz);
7549 GEN_SPEFPUOP_CONV_64_64(efdctsidz);
7552 GEN_SPEFPUOP_COMP_64(efdcmpgt);
7553 GEN_SPEFPUOP_COMP_64(efdcmplt);
7554 GEN_SPEFPUOP_COMP_64(efdcmpeq);
7555 GEN_SPEFPUOP_COMP_64(efdtstgt);
7556 GEN_SPEFPUOP_COMP_64(efdtstlt);
7557 GEN_SPEFPUOP_COMP_64(efdtsteq);
7559 /* Opcodes definitions */
7560 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPEFPU); //
7561 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPEFPU); //
7562 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPEFPU); //
7563 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPEFPU); //
7564 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPEFPU); //
7565 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPEFPU); //
7566 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPEFPU); //
7567 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPEFPU); //
7568 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPEFPU); //
7569 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPEFPU); //
7570 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPEFPU); //
7571 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPEFPU); //
7572 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPEFPU); //
7573 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPEFPU); //
7574 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPEFPU); //
7575 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPEFPU); //
7577 /* End opcode list */
/* Sentinel entry delimiting the end of the opcode definition section. */
7578 GEN_OPCODE_MARK(end);
7580 #include "translate_init.c"
7581 #include "helper_regs.h"
7583 /*****************************************************************************/
7584 /* Misc PowerPC helpers */
/* Dump the architected CPU state (NIP/LR/CTR/XER, MSR/HID0, time base,
 * GPRs, CR, FPRs and - in system emulation - SRR0/SRR1/SDR1) to 'f'
 * via 'cpu_fprintf'.
 * NOTE(review): local declarations (i, a, RGPL/RFPL row widths) and
 * several #else/#endif lines are not visible in this excerpt. */
7585 void cpu_dump_state (CPUState *env, FILE *f,
7586 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
7594 cpu_fprintf(f, "NIP " ADDRX " LR " ADDRX " CTR " ADDRX " XER %08x\n",
7595 env->nip, env->lr, env->ctr, env->xer);
7596 cpu_fprintf(f, "MSR " ADDRX " HID0 " ADDRX " HF " ADDRX " idx %d\n",
7597 env->msr, env->spr[SPR_HID0], env->hflags, env->mmu_idx);
7598 #if !defined(NO_TIMER_DUMP)
/* Time base (and decrementer in system emulation). */
7599 cpu_fprintf(f, "TB %08x %08x "
7600 #if !defined(CONFIG_USER_ONLY)
7604 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
7605 #if !defined(CONFIG_USER_ONLY)
7606 , cpu_ppc_load_decr(env)
/* General-purpose registers, RGPL per printed row. */
7610 for (i = 0; i < 32; i++) {
7611 if ((i & (RGPL - 1)) == 0)
7612 cpu_fprintf(f, "GPR%02d", i);
7613 cpu_fprintf(f, " " REGX, ppc_dump_gpr(env, i));
7614 if ((i & (RGPL - 1)) == (RGPL - 1))
7615 cpu_fprintf(f, "\n");
/* Condition register: one hex digit per field, then L/G/E/O letters. */
7617 cpu_fprintf(f, "CR ");
7618 for (i = 0; i < 8; i++)
7619 cpu_fprintf(f, "%01x", env->crf[i]);
7620 cpu_fprintf(f, " [");
7621 for (i = 0; i < 8; i++) {
7623 if (env->crf[i] & 0x08)
7625 else if (env->crf[i] & 0x04)
7627 else if (env->crf[i] & 0x02)
7629 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
7631 cpu_fprintf(f, " ] RES " ADDRX "\n", env->reserve);
/* Floating-point registers dumped as raw 64-bit bit images. */
7632 for (i = 0; i < 32; i++) {
7633 if ((i & (RFPL - 1)) == 0)
7634 cpu_fprintf(f, "FPR%02d", i);
7635 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
7636 if ((i & (RFPL - 1)) == (RFPL - 1))
7637 cpu_fprintf(f, "\n");
7639 #if !defined(CONFIG_USER_ONLY)
7640 cpu_fprintf(f, "SRR0 " ADDRX " SRR1 " ADDRX " SDR1 " ADDRX "\n",
7641 env->spr[SPR_SRR0], env->spr[SPR_SRR1], env->sdr1);
/* Dump per-opcode execution counts gathered when DO_PPC_STATISTICS is
 * enabled; the body is compiled out otherwise.  Walks the up-to-3-level
 * opcode dispatch tables (64 primary x 32 x 32 entries) and prints the
 * handler name and count for every opcode that was executed. */
7648 void cpu_dump_statistics (CPUState *env, FILE*f,
7649 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
7652 #if defined(DO_PPC_STATISTICS)
7653 opc_handler_t **t1, **t2, **t3, *handler;
7657 for (op1 = 0; op1 < 64; op1++) {
7659 if (is_indirect_opcode(handler)) {
/* Second-level (opc2) table. */
7660 t2 = ind_table(handler);
7661 for (op2 = 0; op2 < 32; op2++) {
7663 if (is_indirect_opcode(handler)) {
/* Third-level (opc3) table. */
7664 t3 = ind_table(handler);
7665 for (op3 = 0; op3 < 32; op3++) {
7667 if (handler->count == 0)
7669 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
7671 op1, op2, op3, op1, (op3 << 5) | op2,
7673 handler->count, handler->count);
7676 if (handler->count == 0)
7678 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
7680 op1, op2, op1, op2, handler->oname,
7681 handler->count, handler->count);
7685 if (handler->count == 0)
7687 cpu_fprintf(f, "%02x (%02x ) %16s: %016llx %lld\n",
7688 op1, op1, handler->oname,
7689 handler->count, handler->count);
7695 /*****************************************************************************/
/* Core translation loop: decode guest PowerPC instructions starting at
 * tb->pc and emit TCG ops until an exception is raised, a page boundary
 * is crossed, single-stepping demands a stop, or the op buffer / insn
 * budget is exhausted.  With search_pc set, also records the mapping
 * from op-buffer offsets to guest PCs for exception restart.
 * NOTE(review): several declarations, braces and #else/#endif lines are
 * not visible in this excerpt. */
7696 static always_inline void gen_intermediate_code_internal (CPUState *env,
7697 TranslationBlock *tb,
7700 DisasContext ctx, *ctxp = &ctx;
7701 opc_handler_t **table, *handler;
7702 target_ulong pc_start;
7703 uint16_t *gen_opc_end;
7704 int supervisor, little_endian;
7711 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
7712 #if defined(OPTIMIZE_FPRF_UPDATE)
7713 gen_fprf_ptr = gen_fprf_buf;
/* Initialise the disassembly context from the current CPU state. */
7717 ctx.exception = POWERPC_EXCP_NONE;
7718 ctx.spr_cb = env->spr_cb;
7719 supervisor = env->mmu_idx;
7720 #if !defined(CONFIG_USER_ONLY)
7721 ctx.supervisor = supervisor;
7723 little_endian = env->hflags & (1 << MSR_LE) ? 1 : 0;
7724 #if defined(TARGET_PPC64)
7725 ctx.sf_mode = msr_sf;
/* mem_idx packs privilege, 64-bit mode and endianness for loads/stores. */
7726 ctx.mem_idx = (supervisor << 2) | (msr_sf << 1) | little_endian;
7728 ctx.mem_idx = (supervisor << 1) | little_endian;
7730 ctx.fpu_enabled = msr_fp;
7731 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
7732 ctx.spe_enabled = msr_spe;
7734 ctx.spe_enabled = 0;
7735 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
7736 ctx.altivec_enabled = msr_vr;
7738 ctx.altivec_enabled = 0;
/* Single-step state: MSR[SE]/MSR[BE] trace bits plus gdbstub stepping. */
7739 if ((env->flags & POWERPC_FLAG_SE) && msr_se)
7740 ctx.singlestep_enabled = CPU_SINGLE_STEP;
7742 ctx.singlestep_enabled = 0;
7743 if ((env->flags & POWERPC_FLAG_BE) && msr_be)
7744 ctx.singlestep_enabled |= CPU_BRANCH_STEP;
7745 if (unlikely(env->singlestep_enabled))
7746 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
7747 #if defined (DO_SINGLE_STEP) && 0
7748 /* Single step trace mode */
7752 max_insns = tb->cflags & CF_COUNT_MASK;
7754 max_insns = CF_COUNT_MASK;
7757 /* Set env in case of segfault during code fetch */
/* Main loop: one iteration per guest instruction. */
7758 while (ctx.exception == POWERPC_EXCP_NONE && gen_opc_ptr < gen_opc_end) {
/* Stop translation at gdb breakpoints. */
7759 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
7760 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
7761 if (bp->pc == ctx.nip) {
7762 gen_update_nip(&ctx, ctx.nip);
7763 gen_helper_raise_debug();
/* Record op-buffer offset -> guest PC mapping for exception restart. */
7768 if (unlikely(search_pc)) {
7769 j = gen_opc_ptr - gen_opc_buf;
7773 gen_opc_instr_start[lj++] = 0;
7774 gen_opc_pc[lj] = ctx.nip;
7775 gen_opc_instr_start[lj] = 1;
7776 gen_opc_icount[lj] = num_insns;
7779 #if defined PPC_DEBUG_DISAS
7780 if (loglevel & CPU_LOG_TB_IN_ASM) {
7781 fprintf(logfile, "----------------\n");
7782 fprintf(logfile, "nip=" ADDRX " super=%d ir=%d\n",
7783 ctx.nip, supervisor, (int)msr_ir);
7786 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
/* Fetch the 32-bit opcode, byte-swapping in little-endian mode. */
7788 if (unlikely(little_endian)) {
7789 ctx.opcode = bswap32(ldl_code(ctx.nip));
7791 ctx.opcode = ldl_code(ctx.nip);
7793 #if defined PPC_DEBUG_DISAS
7794 if (loglevel & CPU_LOG_TB_IN_ASM) {
7795 fprintf(logfile, "translate opcode %08x (%02x %02x %02x) (%s)\n",
7796 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
7797 opc3(ctx.opcode), little_endian ? "little" : "big");
/* Walk the up-to-3-level opcode dispatch table (opc1/opc2/opc3). */
7801 table = env->opcodes;
7803 handler = table[opc1(ctx.opcode)];
7804 if (is_indirect_opcode(handler)) {
7805 table = ind_table(handler);
7806 handler = table[opc2(ctx.opcode)];
7807 if (is_indirect_opcode(handler)) {
7808 table = ind_table(handler);
7809 handler = table[opc3(ctx.opcode)];
7812 /* Is opcode *REALLY* valid ? */
7813 if (unlikely(handler->handler == &gen_invalid)) {
7814 if (loglevel != 0) {
7815 fprintf(logfile, "invalid/unsupported opcode: "
7816 "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
7817 opc1(ctx.opcode), opc2(ctx.opcode),
7818 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
7820 printf("invalid/unsupported opcode: "
7821 "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
7822 opc1(ctx.opcode), opc2(ctx.opcode),
7823 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
/* Reject encodings with bits set inside the handler's invalid mask. */
7826 if (unlikely((ctx.opcode & handler->inval) != 0)) {
7827 if (loglevel != 0) {
7828 fprintf(logfile, "invalid bits: %08x for opcode: "
7829 "%02x - %02x - %02x (%08x) " ADDRX "\n",
7830 ctx.opcode & handler->inval, opc1(ctx.opcode),
7831 opc2(ctx.opcode), opc3(ctx.opcode),
7832 ctx.opcode, ctx.nip - 4);
7834 printf("invalid bits: %08x for opcode: "
7835 "%02x - %02x - %02x (%08x) " ADDRX "\n",
7836 ctx.opcode & handler->inval, opc1(ctx.opcode),
7837 opc2(ctx.opcode), opc3(ctx.opcode),
7838 ctx.opcode, ctx.nip - 4);
7840 GEN_EXCP_INVAL(ctxp);
/* Emit the TCG code for this instruction. */
7844 (*(handler->handler))(&ctx);
7845 #if defined(DO_PPC_STATISTICS)
7848 /* Check trace mode exceptions */
7849 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
7850 (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
7851 ctx.exception != POWERPC_SYSCALL &&
7852 ctx.exception != POWERPC_EXCP_TRAP &&
7853 ctx.exception != POWERPC_EXCP_BRANCH)) {
7854 GEN_EXCP(ctxp, POWERPC_EXCP_TRACE, 0);
7855 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
7856 (env->singlestep_enabled) ||
7857 num_insns >= max_insns)) {
7858 /* if we reach a page boundary or are single stepping, stop
7863 #if defined (DO_SINGLE_STEP)
/* Epilogue: close the translation block. */
7867 if (tb->cflags & CF_LAST_IO)
7869 if (ctx.exception == POWERPC_EXCP_NONE) {
7870 gen_goto_tb(&ctx, 0, ctx.nip);
7871 } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
7872 if (unlikely(env->singlestep_enabled)) {
7873 gen_update_nip(&ctx, ctx.nip);
7874 gen_helper_raise_debug();
7876 /* Generate the return instruction */
7879 gen_icount_end(tb, num_insns);
7880 *gen_opc_ptr = INDEX_op_end;
7881 if (unlikely(search_pc)) {
7882 j = gen_opc_ptr - gen_opc_buf;
7885 gen_opc_instr_start[lj++] = 0;
7887 tb->size = ctx.nip - pc_start;
7888 tb->icount = num_insns;
7890 #if defined(DEBUG_DISAS)
7891 if (loglevel & CPU_LOG_TB_CPU) {
7892 fprintf(logfile, "---------------- excp: %04x\n", ctx.exception);
7893 cpu_dump_state(env, logfile, fprintf, 0);
7895 if (loglevel & CPU_LOG_TB_IN_ASM) {
7897 flags = env->bfd_mach;
7898 flags |= little_endian << 16;
7899 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
7900 target_disas(logfile, pc_start, ctx.nip - pc_start, flags);
7901 fprintf(logfile, "\n");
/* Public entry point: translate one TB without recording PC mappings. */
7906 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
7908 gen_intermediate_code_internal(env, tb, 0);
/* Public entry point: translate one TB recording op->PC mappings
 * (search_pc = 1), used when restoring state after an exception. */
7911 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
7913 gen_intermediate_code_internal(env, tb, 1);
7916 void gen_pc_load(CPUState *env, TranslationBlock *tb,
7917 unsigned long searched_pc, int pc_pos, void *puc)
7919 env->nip = gen_opc_pc[pc_pos];