X-Git-Url: http://git.maemo.org/git/?a=blobdiff_plain;f=tcg%2Ftcg-op.h;h=7cb6934143435ef1203db6a7cd847740b8b57e1a;hb=b348113d2161a339780e2d9e0479b1f9a53c6cbc;hp=a0eaa81f2db9d9f71d894445a956792254d11a5d;hpb=49516bc0d622112caac9df628caf19010fda8b67;p=qemu

diff --git a/tcg/tcg-op.h b/tcg/tcg-op.h
index a0eaa81..7cb6934 100644
--- a/tcg/tcg-op.h
+++ b/tcg/tcg-op.h
@@ -318,7 +318,7 @@ static inline void tcg_gen_br(int label)
 
 static inline void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
 {
-    if (GET_TCGV_I32(ret) != GET_TCGV_I32(arg))
+    if (!TCGV_EQUAL_I32(ret, arg))
         tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
 }
 
@@ -436,7 +436,11 @@ static inline void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
 
 static inline void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
 {
-    tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
+    if (TCGV_EQUAL_I32(arg1, arg2)) {
+        tcg_gen_mov_i32(ret, arg1);
+    } else {
+        tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
+    }
 }
 
 static inline void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -455,7 +459,11 @@ static inline void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
 
 static inline void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
 {
-    tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
+    if (TCGV_EQUAL_I32(arg1, arg2)) {
+        tcg_gen_mov_i32(ret, arg1);
+    } else {
+        tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
+    }
 }
 
 static inline void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -474,7 +482,11 @@ static inline void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
 
 static inline void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
 {
-    tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
+    if (TCGV_EQUAL_I32(arg1, arg2)) {
+        tcg_gen_movi_i32(ret, 0);
+    } else {
+        tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
+    }
 }
 
 static inline void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -625,7 +637,7 @@ static inline void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
 
 static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
 {
-    if (GET_TCGV_I64(ret) != GET_TCGV_I64(arg)) {
+    if (!TCGV_EQUAL_I64(ret, arg)) {
         tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
         tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
     }
 }
@@ -654,7 +666,7 @@ static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2,
 static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                      tcg_target_long offset)
 {
-    tcg_gen_ld16u_i32(TCGV_LOW(ret), TCGV_LOW(arg2), offset);
+    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
     tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
 }
 
@@ -858,7 +870,7 @@ static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
 
 static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
 {
-    if (GET_TCGV_I64(ret) != GET_TCGV_I64(arg))
+    if (!TCGV_EQUAL_I64(ret, arg))
         tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
 }
 
@@ -943,7 +955,11 @@ static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
 
 static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
 {
-    tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
+    if (TCGV_EQUAL_I64(arg1, arg2)) {
+        tcg_gen_mov_i64(ret, arg1);
+    } else {
+        tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
+    }
 }
 
 static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
@@ -955,7 +971,11 @@ static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
 
 static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
 {
-    tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
+    if (TCGV_EQUAL_I64(arg1, arg2)) {
+        tcg_gen_mov_i64(ret, arg1);
+    } else {
+        tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
+    }
 }
 
 static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
@@ -967,7 +987,11 @@ static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
 
 static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
 {
-    tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
+    if (TCGV_EQUAL_I64(arg1, arg2)) {
+        tcg_gen_movi_i64(ret, 0);
+    } else {
+        tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
+    }
 }
 
 static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
@@ -1183,23 +1207,20 @@ static inline void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
 #ifdef TCG_TARGET_HAS_bswap16_i32
     tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
 #else
-    TCGv_i32 t0, t1;
-    t0 = tcg_temp_new_i32();
-    t1 = tcg_temp_new_i32();
+    TCGv_i32 t0 = tcg_temp_new_i32();
 
-    tcg_gen_shri_i32(t0, arg, 8);
-    tcg_gen_andi_i32(t1, arg, 0x000000ff);
-    tcg_gen_shli_i32(t1, t1, 8);
-    tcg_gen_or_i32(ret, t0, t1);
+    tcg_gen_ext8u_i32(t0, arg);
+    tcg_gen_shli_i32(t0, t0, 8);
+    tcg_gen_shri_i32(ret, arg, 8);
+    tcg_gen_or_i32(ret, ret, t0);
     tcg_temp_free_i32(t0);
-    tcg_temp_free_i32(t1);
 #endif
 }
 
-static inline void tcg_gen_bswap_i32(TCGv_i32 ret, TCGv_i32 arg)
+static inline void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
 {
-#ifdef TCG_TARGET_HAS_bswap_i32
-    tcg_gen_op2_i32(INDEX_op_bswap_i32, ret, arg);
+#ifdef TCG_TARGET_HAS_bswap32_i32
+    tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
 #else
     TCGv_i32 t0, t1;
     t0 = tcg_temp_new_i32();
     t1 = tcg_temp_new_i32();
@@ -1276,14 +1297,28 @@ static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
     tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
 }
 
-static inline void tcg_gen_bswap_i64(TCGv_i64 ret, TCGv_i64 arg)
+/* Note: we assume the six high bytes are set to zero */
+static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
+{
+    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
+    tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
+}
+
+/* Note: we assume the four high bytes are set to zero */
+static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
+{
+    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
+    tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
+}
+
+static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
 {
     TCGv_i32 t0, t1;
     t0 = tcg_temp_new_i32();
     t1 = tcg_temp_new_i32();
 
-    tcg_gen_bswap_i32(t0, TCGV_LOW(arg));
-    tcg_gen_bswap_i32(t1, TCGV_HIGH(arg));
+    tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
+    tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
     tcg_gen_mov_i32(TCGV_LOW(ret), t1);
     tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
     tcg_temp_free_i32(t0);
@@ -1357,14 +1392,57 @@ static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
     tcg_gen_ext32s_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
 }
 
-static inline void tcg_gen_bswap_i64(TCGv_i64 ret, TCGv_i64 arg)
+/* Note: we assume the six high bytes are set to zero */
+static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
 {
-#ifdef TCG_TARGET_HAS_bswap_i64
-    tcg_gen_op2_i64(INDEX_op_bswap_i64, ret, arg);
+#ifdef TCG_TARGET_HAS_bswap16_i64
+    tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
 #else
-    TCGv_i32 t0, t1;
-    t0 = tcg_temp_new_i32();
-    t1 = tcg_temp_new_i32();
+    TCGv_i64 t0 = tcg_temp_new_i64();
+
+    tcg_gen_ext8u_i64(t0, arg);
+    tcg_gen_shli_i64(t0, t0, 8);
+    tcg_gen_shri_i64(ret, arg, 8);
+    tcg_gen_or_i64(ret, ret, t0);
+    tcg_temp_free_i64(t0);
+#endif
+}
+
+/* Note: we assume the four high bytes are set to zero */
+static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
+{
+#ifdef TCG_TARGET_HAS_bswap32_i64
+    tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
+#else
+    TCGv_i64 t0, t1;
+    t0 = tcg_temp_new_i64();
+    t1 = tcg_temp_new_i64();
+
+    tcg_gen_shli_i64(t0, arg, 24);
+    tcg_gen_ext32u_i64(t0, t0);
+
+    tcg_gen_andi_i64(t1, arg, 0x0000ff00);
+    tcg_gen_shli_i64(t1, t1, 8);
+    tcg_gen_or_i64(t0, t0, t1);
+
+    tcg_gen_shri_i64(t1, arg, 8);
+    tcg_gen_andi_i64(t1, t1, 0x0000ff00);
+    tcg_gen_or_i64(t0, t0, t1);
+
+    tcg_gen_shri_i64(t1, arg, 24);
+    tcg_gen_or_i64(ret, t0, t1);
+    tcg_temp_free_i64(t0);
+    tcg_temp_free_i64(t1);
+#endif
+}
+
+static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
+{
+#ifdef TCG_TARGET_HAS_bswap64_i64
+    tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
+#else
+    TCGv_i64 t0 = tcg_temp_new_i64();
+    TCGv_i64 t1 = tcg_temp_new_i64();
 
     tcg_gen_shli_i64(t0, arg, 56);
 
@@ -1394,8 +1472,8 @@ static inline void tcg_gen_bswap_i64(TCGv_i64 ret, TCGv_i64 arg)
 
     tcg_gen_shri_i64(t1, arg, 56);
     tcg_gen_or_i64(ret, t0, t1);
-    tcg_temp_free_i32(t0);
-    tcg_temp_free_i32(t1);
+    tcg_temp_free_i64(t0);
+    tcg_temp_free_i64(t1);
 #endif
 }
 
@@ -1425,12 +1503,20 @@ static inline void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
 
 static inline void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
 {
+#ifdef TCG_TARGET_HAS_not_i32
+    tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
+#else
     tcg_gen_xori_i32(ret, arg, -1);
+#endif
 }
 
 static inline void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
 {
+#ifdef TCG_TARGET_HAS_not_i64
+    tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
+#else
    tcg_gen_xori_i64(ret, arg, -1);
+#endif
 }
 
 static inline void tcg_gen_discard_i32(TCGv_i32 arg)
@@ -1501,56 +1587,38 @@ static inline void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
 
 static inline void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
 {
-    TCGv_i32 t0;
-    t0 = tcg_temp_new_i32();
-    tcg_gen_xor_i32(t0, arg1, arg2);
-    tcg_gen_not_i32(ret, t0);
-    tcg_temp_free_i32(t0);
+    tcg_gen_xor_i32(ret, arg1, arg2);
+    tcg_gen_not_i32(ret, ret);
 }
 
 static inline void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
 {
-    TCGv_i64 t0;
-    t0 = tcg_temp_new_i64();
-    tcg_gen_xor_i64(t0, arg1, arg2);
-    tcg_gen_not_i64(ret, t0);
-    tcg_temp_free_i64(t0);
+    tcg_gen_xor_i64(ret, arg1, arg2);
+    tcg_gen_not_i64(ret, ret);
 }
 
 static inline void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
 {
-    TCGv_i32 t0;
-    t0 = tcg_temp_new_i32();
-    tcg_gen_and_i32(t0, arg1, arg2);
-    tcg_gen_not_i32(ret, t0);
-    tcg_temp_free_i32(t0);
+    tcg_gen_and_i32(ret, arg1, arg2);
+    tcg_gen_not_i32(ret, ret);
 }
 
 static inline void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
 {
-    TCGv_i64 t0;
-    t0 = tcg_temp_new_i64();
-    tcg_gen_and_i64(t0, arg1, arg2);
-    tcg_gen_not_i64(ret, t0);
-    tcg_temp_free_i64(t0);
+    tcg_gen_and_i64(ret, arg1, arg2);
+    tcg_gen_not_i64(ret, ret);
 }
 
 static inline void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
 {
-    TCGv_i32 t0;
-    t0 = tcg_temp_new_i32();
-    tcg_gen_or_i32(t0, arg1, arg2);
-    tcg_gen_not_i32(ret, t0);
-    tcg_temp_free_i32(t0);
+    tcg_gen_or_i32(ret, arg1, arg2);
+    tcg_gen_not_i32(ret, ret);
 }
 
 static inline void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
 {
-    TCGv_i64 t0;
-    t0 = tcg_temp_new_i64();
-    tcg_gen_or_i64(t0, arg1, arg2);
-    tcg_gen_not_i64(ret, t0);
-    tcg_temp_free_i64(t0);
+    tcg_gen_or_i64(ret, arg1, arg2);
+    tcg_gen_not_i64(ret, ret);
 }
 
 static inline void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
@@ -1573,6 +1641,9 @@ static inline void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
 
 static inline void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
 {
+#ifdef TCG_TARGET_HAS_rot_i32
+    tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
+#else
     TCGv_i32 t0, t1;
 
     t0 = tcg_temp_new_i32();
@@ -1583,10 +1654,14 @@ static inline void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
     tcg_gen_or_i32(ret, t0, t1);
     tcg_temp_free_i32(t0);
     tcg_temp_free_i32(t1);
+#endif
 }
 
 static inline void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
 {
+#ifdef TCG_TARGET_HAS_rot_i64
+    tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
+#else
     TCGv_i64 t0, t1;
 
     t0 = tcg_temp_new_i64();
@@ -1597,6 +1672,7 @@ static inline void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
     tcg_gen_or_i64(ret, t0, t1);
     tcg_temp_free_i64(t0);
     tcg_temp_free_i64(t1);
+#endif
 }
 
 static inline void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -1605,6 +1681,11 @@ static inline void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     if (arg2 == 0) {
         tcg_gen_mov_i32(ret, arg1);
     } else {
+#ifdef TCG_TARGET_HAS_rot_i32
+        TCGv_i32 t0 = tcg_const_i32(arg2);
+        tcg_gen_rotl_i32(ret, arg1, t0);
+        tcg_temp_free_i32(t0);
+#else
         TCGv_i32 t0, t1;
         t0 = tcg_temp_new_i32();
         t1 = tcg_temp_new_i32();
@@ -1613,6 +1694,7 @@ static inline void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
         tcg_gen_or_i32(ret, t0, t1);
         tcg_temp_free_i32(t0);
         tcg_temp_free_i32(t1);
+#endif
     }
 }
 
@@ -1622,6 +1704,11 @@ static inline void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
     if (arg2 == 0) {
         tcg_gen_mov_i64(ret, arg1);
     } else {
+#ifdef TCG_TARGET_HAS_rot_i64
+        TCGv_i64 t0 = tcg_const_i64(arg2);
+        tcg_gen_rotl_i64(ret, arg1, t0);
+        tcg_temp_free_i64(t0);
+#else
         TCGv_i64 t0, t1;
         t0 = tcg_temp_new_i64();
         t1 = tcg_temp_new_i64();
@@ -1630,11 +1717,15 @@ static inline void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
         tcg_gen_or_i64(ret, t0, t1);
         tcg_temp_free_i64(t0);
         tcg_temp_free_i64(t1);
+#endif
     }
 }
 
 static inline void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
 {
+#ifdef TCG_TARGET_HAS_rot_i32
+    tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
+#else
     TCGv_i32 t0, t1;
 
     t0 = tcg_temp_new_i32();
@@ -1645,20 +1736,25 @@ static inline void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
     tcg_gen_or_i32(ret, t0, t1);
     tcg_temp_free_i32(t0);
     tcg_temp_free_i32(t1);
+#endif
 }
 
 static inline void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
 {
+#ifdef TCG_TARGET_HAS_rot_i64
+    tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
+#else
     TCGv_i64 t0, t1;
 
     t0 = tcg_temp_new_i64();
     t1 = tcg_temp_new_i64();
-    tcg_gen_shl_i64(t0, arg1, arg2);
+    tcg_gen_shr_i64(t0, arg1, arg2);
     tcg_gen_subfi_i64(t1, 64, arg2);
     tcg_gen_shl_i64(t1, arg1, t1);
     tcg_gen_or_i64(ret, t0, t1);
     tcg_temp_free_i64(t0);
     tcg_temp_free_i64(t1);
+#endif
 }
 
 static inline void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -1693,23 +1789,23 @@ static inline void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
 #define tcg_temp_new() tcg_temp_new_i32()
 #define tcg_global_reg_new tcg_global_reg_new_i32
 #define tcg_global_mem_new tcg_global_mem_new_i32
-#define tcg_temp_local_new(t) tcg_temp_local_new_i32()
+#define tcg_temp_local_new() tcg_temp_local_new_i32()
 #define tcg_temp_free tcg_temp_free_i32
 #define tcg_gen_qemu_ldst_op tcg_gen_op3i_i32
 #define tcg_gen_qemu_ldst_op_i64 tcg_gen_qemu_ldst_op_i64_i32
 #define TCGV_UNUSED(x) TCGV_UNUSED_I32(x)
-#define TCGV_EQUAL(a, b) (GET_TCGV_I32(a) == GET_TCGV_I32(b))
+#define TCGV_EQUAL(a, b) TCGV_EQUAL_I32(a, b)
 #else
 #define TCGv TCGv_i64
 #define tcg_temp_new() tcg_temp_new_i64()
 #define tcg_global_reg_new tcg_global_reg_new_i64
 #define tcg_global_mem_new tcg_global_mem_new_i64
-#define tcg_temp_local_new(t) tcg_temp_local_new_i64()
+#define tcg_temp_local_new() tcg_temp_local_new_i64()
 #define tcg_temp_free tcg_temp_free_i64
 #define tcg_gen_qemu_ldst_op tcg_gen_op3i_i64
 #define tcg_gen_qemu_ldst_op_i64 tcg_gen_qemu_ldst_op_i64_i64
 #define TCGV_UNUSED(x) TCGV_UNUSED_I64(x)
-#define TCGV_EQUAL(a, b) (GET_TCGV_I64(a) == GET_TCGV_I64(b))
+#define TCGV_EQUAL(a, b) TCGV_EQUAL_I64(a, b)
 #endif
 
 /* debug info: write the PC of the corresponding QEMU CPU instruction */
@@ -1955,6 +2051,10 @@ static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
 #define tcg_gen_brcondi_tl tcg_gen_brcondi_i64
 #define tcg_gen_mul_tl tcg_gen_mul_i64
 #define tcg_gen_muli_tl tcg_gen_muli_i64
+#define tcg_gen_div_tl tcg_gen_div_i64
+#define tcg_gen_rem_tl tcg_gen_rem_i64
+#define tcg_gen_divu_tl tcg_gen_divu_i64
+#define tcg_gen_remu_tl tcg_gen_remu_i64
 #define tcg_gen_discard_tl tcg_gen_discard_i64
 #define tcg_gen_trunc_tl_i32 tcg_gen_trunc_i64_i32
 #define tcg_gen_trunc_i64_tl tcg_gen_mov_i64
@@ -1968,6 +2068,9 @@ static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
 #define tcg_gen_ext16s_tl tcg_gen_ext16s_i64
 #define tcg_gen_ext32u_tl tcg_gen_ext32u_i64
 #define tcg_gen_ext32s_tl tcg_gen_ext32s_i64
+#define tcg_gen_bswap16_tl tcg_gen_bswap16_i64
+#define tcg_gen_bswap32_tl tcg_gen_bswap32_i64
+#define tcg_gen_bswap64_tl tcg_gen_bswap64_i64
 #define tcg_gen_concat_tl_i64 tcg_gen_concat32_i64
 #define tcg_gen_andc_tl tcg_gen_andc_i64
 #define tcg_gen_eqv_tl tcg_gen_eqv_i64
@@ -2018,6 +2121,10 @@ static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
 #define tcg_gen_brcondi_tl tcg_gen_brcondi_i32
 #define tcg_gen_mul_tl tcg_gen_mul_i32
 #define tcg_gen_muli_tl tcg_gen_muli_i32
+#define tcg_gen_div_tl tcg_gen_div_i32
+#define tcg_gen_rem_tl tcg_gen_rem_i32
+#define tcg_gen_divu_tl tcg_gen_divu_i32
+#define tcg_gen_remu_tl tcg_gen_remu_i32
 #define tcg_gen_discard_tl tcg_gen_discard_i32
 #define tcg_gen_trunc_tl_i32 tcg_gen_mov_i32
 #define tcg_gen_trunc_i64_tl tcg_gen_trunc_i64_i32
@@ -2031,6 +2138,8 @@ static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
 #define tcg_gen_ext16s_tl tcg_gen_ext16s_i32
 #define tcg_gen_ext32u_tl tcg_gen_mov_i32
 #define tcg_gen_ext32s_tl tcg_gen_mov_i32
+#define tcg_gen_bswap16_tl tcg_gen_bswap16_i32
+#define tcg_gen_bswap32_tl tcg_gen_bswap32_i32
 #define tcg_gen_concat_tl_i64 tcg_gen_concat_i32_i64
 #define tcg_gen_andc_tl tcg_gen_andc_i32
 #define tcg_gen_eqv_tl tcg_gen_eqv_i32
@@ -2054,4 +2163,3 @@ static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
 #define tcg_gen_addi_ptr tcg_gen_addi_i64
 #define tcg_gen_ext_i32_ptr tcg_gen_ext_i32_i64
 #endif /* TCG_TARGET_REG_BITS != 32 */
-