Diffstat (limited to 'tcg/tcg-op.h')
 -rw-r--r--  tcg/tcg-op.h  48
 1 file changed, 22 insertions, 26 deletions
diff --git a/tcg/tcg-op.h b/tcg/tcg-op.h
index 7eabf22f01..8d4ff7da9b 100644
--- a/tcg/tcg-op.h
+++ b/tcg/tcg-op.h
@@ -858,7 +858,7 @@ static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2,
 {
     /* since arg2 and ret have different types, they cannot be the
        same temporary */
-#ifdef TCG_TARGET_WORDS_BIGENDIAN
+#ifdef HOST_WORDS_BIGENDIAN
     tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
     tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
 #else
@@ -888,7 +888,7 @@ static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2,
 static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                   tcg_target_long offset)
 {
-#ifdef TCG_TARGET_WORDS_BIGENDIAN
+#ifdef HOST_WORDS_BIGENDIAN
     tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
     tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
 #else
@@ -2437,14 +2437,12 @@ static inline void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh,
         tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
         tcg_gen_mov_i32(rl, t);
         tcg_temp_free_i32(t);
-    } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_mulu2_i32) {
+    } else if (TCG_TARGET_REG_BITS == 32) {
         TCGv_i32 t0 = tcg_temp_new_i32();
         TCGv_i32 t1 = tcg_temp_new_i32();
         TCGv_i32 t2 = tcg_temp_new_i32();
         TCGv_i32 t3 = tcg_temp_new_i32();
-        tcg_gen_op4_i32(INDEX_op_mulu2_i32, t0, t1, arg1, arg2);
-        /* Allow the optimizer room to replace mulu2 with two moves. */
-        tcg_gen_op0(INDEX_op_nop);
+        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
         /* Adjust for negative inputs. */
         tcg_gen_sari_i32(t2, arg1, 31);
         tcg_gen_sari_i32(t3, arg2, 31);
@@ -2522,26 +2520,6 @@ static inline void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh,
         tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
         tcg_gen_mov_i64(rl, t);
         tcg_temp_free_i64(t);
-    } else if (TCG_TARGET_HAS_mulu2_i64) {
-        TCGv_i64 t0 = tcg_temp_new_i64();
-        TCGv_i64 t1 = tcg_temp_new_i64();
-        TCGv_i64 t2 = tcg_temp_new_i64();
-        TCGv_i64 t3 = tcg_temp_new_i64();
-        tcg_gen_op4_i64(INDEX_op_mulu2_i64, t0, t1, arg1, arg2);
-        /* Allow the optimizer room to replace mulu2 with two moves. */
-        tcg_gen_op0(INDEX_op_nop);
-        /* Adjust for negative inputs. */
-        tcg_gen_sari_i64(t2, arg1, 63);
-        tcg_gen_sari_i64(t3, arg2, 63);
-        tcg_gen_and_i64(t2, t2, arg2);
-        tcg_gen_and_i64(t3, t3, arg1);
-        tcg_gen_sub_i64(rh, t1, t2);
-        tcg_gen_sub_i64(rh, rh, t3);
-        tcg_gen_mov_i64(rl, t0);
-        tcg_temp_free_i64(t0);
-        tcg_temp_free_i64(t1);
-        tcg_temp_free_i64(t2);
-        tcg_temp_free_i64(t3);
     } else {
         TCGv_i64 t0 = tcg_temp_new_i64();
         int sizemask = 0;
@@ -2569,6 +2547,24 @@ static inline void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh,
         tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
         tcg_gen_mov_i64(rl, t);
         tcg_temp_free_i64(t);
+    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
+        TCGv_i64 t0 = tcg_temp_new_i64();
+        TCGv_i64 t1 = tcg_temp_new_i64();
+        TCGv_i64 t2 = tcg_temp_new_i64();
+        TCGv_i64 t3 = tcg_temp_new_i64();
+        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
+        /* Adjust for negative inputs. */
+        tcg_gen_sari_i64(t2, arg1, 63);
+        tcg_gen_sari_i64(t3, arg2, 63);
+        tcg_gen_and_i64(t2, t2, arg2);
+        tcg_gen_and_i64(t3, t3, arg1);
+        tcg_gen_sub_i64(rh, t1, t2);
+        tcg_gen_sub_i64(rh, rh, t3);
+        tcg_gen_mov_i64(rl, t0);
+        tcg_temp_free_i64(t0);
+        tcg_temp_free_i64(t1);
+        tcg_temp_free_i64(t2);
+        tcg_temp_free_i64(t3);
     } else {
         TCGv_i64 t0 = tcg_temp_new_i64();
         int sizemask = 0;
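
Note (not part of the patch): the muls2 fallbacks above derive the signed high half of the product from an unsigned mulu2 by subtracting each operand from the high half whenever the other operand is negative. Below is a minimal standalone C sketch of the same identity on 32-bit operands, so it can be checked against an ordinary 64-bit multiply; the function name is invented for illustration, and an arithmetic right shift of signed integers is assumed (the role tcg_gen_sari_* plays in the generated code).

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative only: returns the high 32 bits of the signed 32x32->64
   product, computed the way the fallback above does -- take the unsigned
   high part, then subtract each operand when the other one is negative. */
static uint32_t mulsh32_from_mulu2(int32_t a, int32_t b)
{
    uint64_t u = (uint64_t)(uint32_t)a * (uint32_t)b;  /* mulu2 -> t0:t1 */
    uint32_t t1 = (uint32_t)(u >> 32);                 /* unsigned high part */
    uint32_t t2 = (uint32_t)(a >> 31) & (uint32_t)b;   /* sari+and: b if a < 0 */
    uint32_t t3 = (uint32_t)(b >> 31) & (uint32_t)a;   /* sari+and: a if b < 0 */
    return t1 - t2 - t3;                               /* rh = t1 - t2 - t3 */
}

int main(void)
{
    int32_t a = -123456789, b = 987654;
    uint32_t ref = (uint32_t)(((int64_t)a * b) >> 32); /* signed reference */
    printf("adjusted 0x%08" PRIx32 "  reference 0x%08" PRIx32 "\n",
           mulsh32_from_mulu2(a, b), ref);
    return 0;
}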