| author | Richard Henderson <richard.henderson@linaro.org> | 2023-10-29 14:08:41 -0700 |
|---|---|---|
| committer | Richard Henderson <richard.henderson@linaro.org> | 2023-11-06 08:27:21 -0800 |
| commit | 09607d35f5a9a6aa1c57302c1e27066ad2309e63 (patch) | |
| tree | 7e675a78d5f97c3a5449265685e9135cc1401b02 /tcg/tcg-op.c | |
| parent | 01bbb6e3eb3368b40384a7e044c9a2d342b8039b (diff) | |
tcg: Move 32-bit expanders out of line
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Message-Id: <20231029210848.78234-5-richard.henderson@linaro.org>
Diffstat (limited to 'tcg/tcg-op.c')
-rw-r--r-- | tcg/tcg-op.c | 116 |
1 file changed, 116 insertions, 0 deletions
```diff
diff --git a/tcg/tcg-op.c b/tcg/tcg-op.c
index a8cbad212d..ab6281ebec 100644
--- a/tcg/tcg-op.c
+++ b/tcg/tcg-op.c
@@ -351,11 +351,28 @@ void tcg_gen_plugin_cb_end(void)
 
 /* 32 bit ops */
 
+void tcg_gen_discard_i32(TCGv_i32 arg)
+{
+    tcg_gen_op1_i32(INDEX_op_discard, arg);
+}
+
+void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
+{
+    if (ret != arg) {
+        tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
+    }
+}
+
 void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
 {
     tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
 }
 
+void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
+{
+    tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
+}
+
 void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
 {
     /* some cases can be optimized here */
@@ -366,6 +383,11 @@ void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     }
 }
 
+void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
+{
+    tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2);
+}
+
 void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
 {
     if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
@@ -386,6 +408,20 @@ void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     }
 }
 
+void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
+{
+    if (TCG_TARGET_HAS_neg_i32) {
+        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
+    } else {
+        tcg_gen_subfi_i32(ret, 0, arg);
+    }
+}
+
+void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
+{
+    tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
+}
+
 void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
 {
     /* Some cases can be optimized here. */
@@ -414,6 +450,11 @@ void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
 }
 
+void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
+{
+    tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
+}
+
 void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
 {
     /* Some cases can be optimized here. */
@@ -426,6 +467,11 @@ void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     }
 }
 
+void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
+{
+    tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
+}
+
 void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
 {
     /* Some cases can be optimized here. */
@@ -439,6 +485,20 @@ void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     }
 }
 
+void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
+{
+    if (TCG_TARGET_HAS_not_i32) {
+        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
+    } else {
+        tcg_gen_xori_i32(ret, arg, -1);
+    }
+}
+
+void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
+{
+    tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2);
+}
+
 void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
 {
     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
@@ -449,6 +509,11 @@ void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     }
 }
 
+void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
+{
+    tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2);
+}
+
 void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
 {
     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
@@ -459,6 +524,11 @@ void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     }
 }
 
+void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
+{
+    tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2);
+}
+
 void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
 {
     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
@@ -527,6 +597,11 @@ void tcg_gen_negsetcondi_i32(TCGCond cond, TCGv_i32 ret,
     tcg_gen_negsetcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
 }
 
+void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
+{
+    tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2);
+}
+
 void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
 {
     if (arg2 == 0) {
@@ -1385,6 +1460,47 @@ void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
     tcg_temp_free_i32(t);
 }
 
+void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
+{
+    tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset);
+}
+
+void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
+{
+    tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset);
+}
+
+void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
+{
+    tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset);
+}
+
+void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
+{
+    tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset);
+}
+
+void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
+{
+    tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset);
+}
+
+void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
+{
+    tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
+}
+
+void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
+{
+    tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
+}
+
+void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
+{
+    tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
+}
+
+
 /* 64-bit ops */
 
 #if TCG_TARGET_REG_BITS == 32
```
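For context on what "out of line" means here: these trivial expanders were previously provided as static inline functions in the TCG headers, and this patch gives them ordinary external definitions in tcg/tcg-op.c. A minimal sketch of the presumed header-side counterpart, using tcg_gen_add_i32 as the example (the header change itself is not part of this diff, which is limited to tcg/tcg-op.c, so the exact "before" form and header location are assumptions):

```c
/* Presumed "before": the expander is a static inline in a TCG header,
 * so every translation unit that includes the header carries its own copy. */
static inline void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
}

/* "After": the header keeps only a prototype; the single out-of-line
 * definition is the one added to tcg/tcg-op.c by this patch. */
void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2);
```

The TCG opcodes emitted at translation time are the same either way; what changes is where the expander body is compiled and how many copies of it the build carries.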