author     Richard Henderson <rth@twiddle.net>  2013-08-14 14:35:56 -0700
committer  Richard Henderson <rth@twiddle.net>  2013-09-02 09:08:29 -0700
commit     03271524b66dfc979cc0412bdb5d8d617426b644 (patch)
tree       31d2002d9a8ab63e59bbcfd0772599ffa79271c0 /tcg/tcg-op.h
parent     4ff78e0dbcd5c795962567fdc1b31e9e03c55b07 (diff)
tcg: Add muluh and mulsh opcodes
Use them in places where mulu2 and muls2 are used. Optimize mulx2 with
dead low part to mulxh.

Reviewed-by: Aurelien Jarno <aurelien@aurel32.net>
Signed-off-by: Richard Henderson <rth@twiddle.net>
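The new opcodes compute only the high half of a widened multiply, unsigned
(muluh) and signed (mulsh) respectively. A minimal C sketch of the i32
semantics, assuming ordinary C arithmetic; the helper names are illustrative
and not part of the patch:

#include <stdint.h>

/* High 32 bits of the unsigned 32x32->64 product (muluh_i32 semantics). */
static inline uint32_t muluh_i32(uint32_t a, uint32_t b)
{
    return (uint32_t)(((uint64_t)a * b) >> 32);
}

/* High 32 bits of the signed 32x32->64 product (mulsh_i32 semantics). */
static inline int32_t mulsh_i32(int32_t a, int32_t b)
{
    return (int32_t)(((int64_t)a * b) >> 32);
}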
Diffstat (limited to 'tcg/tcg-op.h')
-rw-r--r--  tcg/tcg-op.h  40
1 file changed, 36 insertions(+), 4 deletions(-)
diff --git a/tcg/tcg-op.h b/tcg/tcg-op.h
index 364964d8d4..3de7545a46 100644
--- a/tcg/tcg-op.h
+++ b/tcg/tcg-op.h
@@ -1039,10 +1039,18 @@ static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
     t0 = tcg_temp_new_i64();
     t1 = tcg_temp_new_i32();
 
-    tcg_gen_op4_i32(INDEX_op_mulu2_i32, TCGV_LOW(t0), TCGV_HIGH(t0),
-                    TCGV_LOW(arg1), TCGV_LOW(arg2));
-    /* Allow the optimizer room to replace mulu2 with two moves. */
-    tcg_gen_op0(INDEX_op_nop);
+    if (TCG_TARGET_HAS_mulu2_i32) {
+        tcg_gen_op4_i32(INDEX_op_mulu2_i32, TCGV_LOW(t0), TCGV_HIGH(t0),
+                        TCGV_LOW(arg1), TCGV_LOW(arg2));
+        /* Allow the optimizer room to replace mulu2 with two moves. */
+        tcg_gen_op0(INDEX_op_nop);
+    } else {
+        tcg_debug_assert(TCG_TARGET_HAS_muluh_i32);
+        tcg_gen_op3_i32(INDEX_op_mul_i32, TCGV_LOW(t0),
+                        TCGV_LOW(arg1), TCGV_LOW(arg2));
+        tcg_gen_op3_i32(INDEX_op_muluh_i32, TCGV_HIGH(t0),
+                        TCGV_LOW(arg1), TCGV_LOW(arg2));
+    }
 
     tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
     tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
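In the hunk above, tcg_gen_mul_i64 on a 32-bit host needs only the low 64
bits of the product, so it forms the full 32x32->64 product of the low
halves (via mulu2 or, failing that, mul plus muluh) and then adds each
cross product into the high word; the second cross term is added just past
the end of the quoted context. A plain-C sketch of the decomposition, with
illustrative names:

#include <stdint.h>

/* Low 64 bits of (a_hi:a_lo) * (b_hi:b_lo), assembled from 32-bit parts. */
static inline uint64_t mul64_lo_from_parts(uint32_t a_lo, uint32_t a_hi,
                                           uint32_t b_lo, uint32_t b_hi)
{
    uint64_t r = (uint64_t)a_lo * b_lo;   /* full low*low product */
    r += (uint64_t)(a_lo * b_hi) << 32;   /* cross terms touch only ... */
    r += (uint64_t)(a_hi * b_lo) << 32;   /* ... the high 32 bits */
    return r;
}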
@@ -2401,6 +2409,12 @@ static inline void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh,
         tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
         /* Allow the optimizer room to replace mulu2 with two moves. */
         tcg_gen_op0(INDEX_op_nop);
+    } else if (TCG_TARGET_HAS_muluh_i32) {
+        TCGv_i32 t = tcg_temp_new_i32();
+        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
+        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
+        tcg_gen_mov_i32(rl, t);
+        tcg_temp_free_i32(t);
     } else {
         TCGv_i64 t0 = tcg_temp_new_i64();
         TCGv_i64 t1 = tcg_temp_new_i64();
@@ -2420,6 +2434,12 @@ static inline void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh,
         tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
         /* Allow the optimizer room to replace muls2 with two moves. */
         tcg_gen_op0(INDEX_op_nop);
+    } else if (TCG_TARGET_HAS_mulsh_i32) {
+        TCGv_i32 t = tcg_temp_new_i32();
+        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
+        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
+        tcg_gen_mov_i32(rl, t);
+        tcg_temp_free_i32(t);
     } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_mulu2_i32) {
         TCGv_i32 t0 = tcg_temp_new_i32();
         TCGv_i32 t1 = tcg_temp_new_i32();
@@ -2499,6 +2519,12 @@ static inline void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh,
         tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
         /* Allow the optimizer room to replace mulu2 with two moves. */
         tcg_gen_op0(INDEX_op_nop);
+    } else if (TCG_TARGET_HAS_muluh_i64) {
+        TCGv_i64 t = tcg_temp_new_i64();
+        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
+        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
+        tcg_gen_mov_i64(rl, t);
+        tcg_temp_free_i64(t);
     } else if (TCG_TARGET_HAS_mulu2_i64) {
         TCGv_i64 t0 = tcg_temp_new_i64();
         TCGv_i64 t1 = tcg_temp_new_i64();
@@ -2540,6 +2566,12 @@ static inline void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh,
         tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
         /* Allow the optimizer room to replace muls2 with two moves. */
         tcg_gen_op0(INDEX_op_nop);
+    } else if (TCG_TARGET_HAS_mulsh_i64) {
+        TCGv_i64 t = tcg_temp_new_i64();
+        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
+        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
+        tcg_gen_mov_i64(rl, t);
+        tcg_temp_free_i64(t);
     } else {
         TCGv_i64 t0 = tcg_temp_new_i64();
         int sizemask = 0;
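One detail worth noting in the new else-if branches: the low product goes
into a fresh temporary and is moved into rl only after muluh/mulsh has read
both operands, so the expansion remains correct when rl aliases arg1 or
arg2. The same pattern in plain C for the unsigned i32 case, as a sketch
with illustrative names:

#include <stdint.h>

/* Emulate mulu2 -- (*rl, *rh) = a * b -- using only mul and muluh,
 * tolerating rl aliasing either input operand. */
static inline void mulu2_via_muluh(uint32_t *rl, uint32_t *rh,
                                   uint32_t *a, uint32_t *b)
{
    uint32_t t = *a * *b;                         /* low half into a temp */
    *rh = (uint32_t)(((uint64_t)*a * *b) >> 32);  /* high half (muluh) */
    *rl = t;                                      /* rl written last */
}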