Diffstat (limited to 'tcg/tcg-op.h')
-rw-r--r-- | tcg/tcg-op.h | 118 |
1 file changed, 118 insertions, 0 deletions
diff --git a/tcg/tcg-op.h b/tcg/tcg-op.h
index f199ce0131..58af59cc25 100644
--- a/tcg/tcg-op.h
+++ b/tcg/tcg-op.h
@@ -1518,6 +1518,116 @@ static inline void tcg_gen_orc_i64(TCGv ret, TCGv arg1, TCGv arg2)
     tcg_temp_free(t0);
 }
 
+static inline void tcg_gen_rotl_i32(TCGv ret, TCGv arg1, TCGv arg2)
+{
+    TCGv t0, t1;
+
+    t0 = tcg_temp_new(TCG_TYPE_I32);
+    t1 = tcg_temp_new(TCG_TYPE_I32);
+    tcg_gen_shl_i32(t0, arg1, arg2);
+    tcg_gen_subfi_i32(t1, 32, arg2);
+    tcg_gen_shr_i32(t1, arg1, t1);
+    tcg_gen_or_i32(ret, t0, t1);
+    tcg_temp_free(t0);
+    tcg_temp_free(t1);
+}
+
+static inline void tcg_gen_rotl_i64(TCGv ret, TCGv arg1, TCGv arg2)
+{
+    TCGv t0, t1;
+
+    t0 = tcg_temp_new(TCG_TYPE_I64);
+    t1 = tcg_temp_new(TCG_TYPE_I64);
+    tcg_gen_shl_i64(t0, arg1, arg2);
+    tcg_gen_subfi_i64(t1, 64, arg2);
+    tcg_gen_shr_i64(t1, arg1, t1);
+    tcg_gen_or_i64(ret, t0, t1);
+    tcg_temp_free(t0);
+    tcg_temp_free(t1);
+}
+
+static inline void tcg_gen_rotli_i32(TCGv ret, TCGv arg1, int32_t arg2)
+{
+    /* some cases can be optimized here */
+    if (arg2 == 0) {
+        tcg_gen_mov_i32(ret, arg1);
+    } else {
+        TCGv t0, t1;
+        t0 = tcg_temp_new(TCG_TYPE_I32);
+        t1 = tcg_temp_new(TCG_TYPE_I32);
+        tcg_gen_shli_i32(t0, arg1, arg2);
+        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
+        tcg_gen_or_i32(ret, t0, t1);
+        tcg_temp_free(t0);
+        tcg_temp_free(t1);
+    }
+}
+
+static inline void tcg_gen_rotli_i64(TCGv ret, TCGv arg1, int64_t arg2)
+{
+    /* some cases can be optimized here */
+    if (arg2 == 0) {
+        tcg_gen_mov_i64(ret, arg1);
+    } else {
+        TCGv t0, t1;
+        t0 = tcg_temp_new(TCG_TYPE_I64);
+        t1 = tcg_temp_new(TCG_TYPE_I64);
+        tcg_gen_shli_i64(t0, arg1, arg2);
+        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
+        tcg_gen_or_i64(ret, t0, t1);
+        tcg_temp_free(t0);
+        tcg_temp_free(t1);
+    }
+}
+
+static inline void tcg_gen_rotr_i32(TCGv ret, TCGv arg1, TCGv arg2)
+{
+    TCGv t0, t1;
+
+    t0 = tcg_temp_new(TCG_TYPE_I32);
+    t1 = tcg_temp_new(TCG_TYPE_I32);
+    tcg_gen_shr_i32(t0, arg1, arg2);
+    tcg_gen_subfi_i32(t1, 32, arg2);
+    tcg_gen_shl_i32(t1, arg1, t1);
+    tcg_gen_or_i32(ret, t0, t1);
+    tcg_temp_free(t0);
+    tcg_temp_free(t1);
+}
+
+static inline void tcg_gen_rotr_i64(TCGv ret, TCGv arg1, TCGv arg2)
+{
+    TCGv t0, t1;
+
+    t0 = tcg_temp_new(TCG_TYPE_I64);
+    t1 = tcg_temp_new(TCG_TYPE_I64);
+    tcg_gen_shr_i64(t0, arg1, arg2);
+    tcg_gen_subfi_i64(t1, 64, arg2);
+    tcg_gen_shl_i64(t1, arg1, t1);
+    tcg_gen_or_i64(ret, t0, t1);
+    tcg_temp_free(t0);
+    tcg_temp_free(t1);
+}
+
+static inline void tcg_gen_rotri_i32(TCGv ret, TCGv arg1, int32_t arg2)
+{
+    /* some cases can be optimized here */
+    if (arg2 == 0) {
+        tcg_gen_mov_i32(ret, arg1);
+    } else {
+        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
+    }
+}
+
+static inline void tcg_gen_rotri_i64(TCGv ret, TCGv arg1, int64_t arg2)
+{
+    /* some cases can be optimized here */
+    if (arg2 == 0) {
+        tcg_gen_mov_i64(ret, arg1);
+    } else {
+        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
+    }
+}
+
 /***************************************/
 /* QEMU specific operations. Their type depend on the QEMU CPU type.
  */
@@ -1777,6 +1887,10 @@ static inline void tcg_gen_qemu_st64(TCGv arg, TCGv addr, int mem_index)
 #define tcg_gen_nand_tl tcg_gen_nand_i64
 #define tcg_gen_nor_tl tcg_gen_nor_i64
 #define tcg_gen_orc_tl tcg_gen_orc_i64
+#define tcg_gen_rotl_tl tcg_gen_rotl_i64
+#define tcg_gen_rotli_tl tcg_gen_rotli_i64
+#define tcg_gen_rotr_tl tcg_gen_rotr_i64
+#define tcg_gen_rotri_tl tcg_gen_rotri_i64
 #define tcg_const_tl tcg_const_i64
 #define tcg_const_local_tl tcg_const_local_i64
 #else
@@ -1836,6 +1950,10 @@ static inline void tcg_gen_qemu_st64(TCGv arg, TCGv addr, int mem_index)
 #define tcg_gen_nand_tl tcg_gen_nand_i32
 #define tcg_gen_nor_tl tcg_gen_nor_i32
 #define tcg_gen_orc_tl tcg_gen_orc_i32
+#define tcg_gen_rotl_tl tcg_gen_rotl_i32
+#define tcg_gen_rotli_tl tcg_gen_rotli_i32
+#define tcg_gen_rotr_tl tcg_gen_rotr_i32
+#define tcg_gen_rotri_tl tcg_gen_rotri_i32
 #define tcg_const_tl tcg_const_i32
 #define tcg_const_local_tl tcg_const_local_i32
 #endif
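Each of the new helpers expands a rotate into two shifts and an OR, using the identity rotl(x, n) = (x << n) | (x >> (width - n)); the immediate variants special-case a count of zero, since "width - 0" would otherwise request a full-width shift. A standalone C sketch of that identity (illustrative only, not part of the patch; it assumes 0 < n < 32 so neither shift count reaches the type width):

#include <stdint.h>

/* Illustrative sketch of the shift/OR expansion used by the new rotate
 * helpers.  Assumes 0 < n < 32; shifting by the full type width would be
 * undefined behaviour in C. */
static uint32_t rotl32(uint32_t x, unsigned n)
{
    return (x << n) | (x >> (32 - n));
}

static uint32_t rotr32(uint32_t x, unsigned n)
{
    return (x >> n) | (x << (32 - n));
}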