author     Richard Henderson <richard.henderson@linaro.org>  2018-11-20 08:53:34 +0100
committer  Richard Henderson <richard.henderson@linaro.org>  2018-12-17 06:04:44 +0300
commit     6498594c8eda83c5f5915afc34bd03396f8de6df (patch)
tree       a11fd2c30953ce5e42eb85dfe9e0cb77ab1b39d1 /tcg
parent     9e821eab0ab708add35fa0446d880086e845ee3e (diff)
tcg/optimize: Optimize bswap
Somehow we forgot these operations, once upon a time.  This will allow
immediate stores to have their bswap optimized away.

Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
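As a concrete illustration of what the new folding cases compute, here is a
standalone sketch (not QEMU code; QEMU takes bswap16/32/64 from "qemu/bswap.h",
so the portable definitions below are stand-ins):

    #include <stdint.h>
    #include <stdio.h>

    /* Portable stand-ins for the bswap helpers the patch calls. */
    static uint16_t bswap16(uint16_t x)
    {
        return (uint16_t)((x << 8) | (x >> 8));
    }

    static uint32_t bswap32(uint32_t x)
    {
        return ((x & 0x000000ffu) << 24) |
               ((x & 0x0000ff00u) <<  8) |
               ((x & 0x00ff0000u) >>  8) |
               ((x & 0xff000000u) >> 24);
    }

    static uint64_t bswap64(uint64_t x)
    {
        return ((uint64_t)bswap32((uint32_t)x) << 32) |
               bswap32((uint32_t)(x >> 32));
    }

    int main(void)
    {
        /* A guest "store immediate" through a byte-swapping memory op:
         * with the new folding cases, the optimizer can evaluate the swap
         * at translation time and store the already-swapped constant. */
        uint32_t imm = 0x12345678u;
        printf("bswap32(0x%08x) = 0x%08x\n", imm, bswap32(imm)); /* 0x78563412 */
        return 0;
    }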
Diffstat (limited to 'tcg')
-rw-r--r--  tcg/optimize.c  12
1 file changed, 12 insertions(+), 0 deletions(-)
diff --git a/tcg/optimize.c b/tcg/optimize.c
index 5dbe11c3c8..6b98ec13e6 100644
--- a/tcg/optimize.c
+++ b/tcg/optimize.c
@@ -353,6 +353,15 @@ static TCGArg do_constant_folding_2(TCGOpcode op, TCGArg x, TCGArg y)
     CASE_OP_32_64(ext16u):
         return (uint16_t)x;
 
+    CASE_OP_32_64(bswap16):
+        return bswap16(x);
+
+    CASE_OP_32_64(bswap32):
+        return bswap32(x);
+
+    case INDEX_op_bswap64_i64:
+        return bswap64(x);
+
     case INDEX_op_ext_i32_i64:
     case INDEX_op_ext32s_i64:
         return (int32_t)x;
@@ -1105,6 +1114,9 @@ void tcg_optimize(TCGContext *s)
         CASE_OP_32_64(ext16s):
         CASE_OP_32_64(ext16u):
         CASE_OP_32_64(ctpop):
+        CASE_OP_32_64(bswap16):
+        CASE_OP_32_64(bswap32):
+        case INDEX_op_bswap64_i64:
         case INDEX_op_ext32s_i64:
         case INDEX_op_ext32u_i64:
         case INDEX_op_ext_i32_i64:
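
The second hunk only extends a case list: in tcg_optimize(), these labels fall
through to the generic handling of single-operand ops, which replaces the op
with a move-immediate when its input is already known to be constant.  A rough
sketch of that pattern, paraphrased rather than copied from the tree (helpers
such as arg_is_const(), arg_info(), do_constant_folding() and
tcg_opt_gen_movi() exist in this era of optimize.c, but the exact surrounding
code may differ):

        CASE_OP_32_64(bswap16):
        CASE_OP_32_64(bswap32):
        case INDEX_op_bswap64_i64:
            /* If the single input is a known constant, compute the
             * byteswapped value now and emit a move-immediate instead
             * of the bswap op. */
            if (arg_is_const(op->args[1])) {
                tmp = do_constant_folding(opc, arg_info(op->args[1])->val, 0);
                tcg_opt_gen_movi(s, op, op->args[0], tmp);
                break;
            }
            goto do_default;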