Diffstat (limited to 'tcg/optimize.c')
-rw-r--r--  tcg/optimize.c | 99 +++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 99 insertions(+), 0 deletions(-)
diff --git a/tcg/optimize.c b/tcg/optimize.c
index ba16ec27e2..e9ef16b3c6 100644
--- a/tcg/optimize.c
+++ b/tcg/optimize.c
@@ -1851,6 +1851,11 @@ static bool fold_movcond(OptContext *ctx, TCGOp *op)
{
int i;
+ /* If true and false values are the same, eliminate the cmp. */
+ if (args_are_copies(op->args[3], op->args[4])) {
+ return tcg_opt_gen_mov(ctx, op, op->args[0], op->args[3]);
+ }
+
/*
* Canonicalize the "false" input reg to match the destination reg so
* that the tcg backend can implement a "move if true" operation.
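
A minimal scalar sketch of the identity used above (plain C; movcond here is a stand-in for the TCG opcode, not QEMU code): once the two value operands are the same, the result no longer depends on the condition, so the whole operation degenerates to a move.

#include <assert.h>
#include <stdint.h>

/* Model of movcond: select vtrue or vfalse based on a condition. */
static uint64_t movcond(int cond_true, uint64_t vtrue, uint64_t vfalse)
{
    return cond_true ? vtrue : vfalse;
}

int main(void)
{
    uint64_t v = 0xdeadbeef;

    /* Whichever way the condition goes, the result is v. */
    assert(movcond(0, v, v) == v);
    assert(movcond(1, v, v) == v);
    return 0;
}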
@@ -2417,6 +2422,36 @@ static bool fold_setcond2(OptContext *ctx, TCGOp *op)
return tcg_opt_gen_movi(ctx, op, op->args[0], i);
}
+static bool fold_cmp_vec(OptContext *ctx, TCGOp *op)
+{
+ /* Canonicalize the comparison to put immediate second. */
+ if (swap_commutative(NO_DEST, &op->args[1], &op->args[2])) {
+ op->args[3] = tcg_swap_cond(op->args[3]);
+ }
+ return false;
+}
+
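
The swap is only sound because the condition is mirrored along with the operands: x < y iff y > x, x <= y iff y >= x, and EQ/NE are symmetric. A standalone sketch, with swap_cond standing in for QEMU's tcg_swap_cond():

#include <assert.h>
#include <stdint.h>

typedef enum { COND_EQ, COND_NE, COND_LT, COND_GE, COND_LE, COND_GT } Cond;

/* Mirror a condition so operands may be exchanged. */
static Cond swap_cond(Cond c)
{
    switch (c) {
    case COND_LT: return COND_GT;
    case COND_GT: return COND_LT;
    case COND_LE: return COND_GE;
    case COND_GE: return COND_LE;
    default:      return c;      /* EQ and NE are symmetric */
    }
}

static int do_cmp(Cond c, int64_t x, int64_t y)
{
    switch (c) {
    case COND_EQ: return x == y;
    case COND_NE: return x != y;
    case COND_LT: return x < y;
    case COND_GE: return x >= y;
    case COND_LE: return x <= y;
    case COND_GT: return x > y;
    }
    return 0;
}

int main(void)
{
    for (int ci = COND_EQ; ci <= COND_GT; ci++) {
        for (int64_t x = -2; x <= 2; x++) {
            for (int64_t y = -2; y <= 2; y++) {
                Cond c = (Cond)ci;
                /* A comparison and its operand-swapped mirror agree. */
                assert(do_cmp(c, x, y) == do_cmp(swap_cond(c), y, x));
            }
        }
    }
    return 0;
}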
+static bool fold_cmpsel_vec(OptContext *ctx, TCGOp *op)
+{
+ /* If true and false values are the same, eliminate the cmp. */
+ if (args_are_copies(op->args[3], op->args[4])) {
+ return tcg_opt_gen_mov(ctx, op, op->args[0], op->args[3]);
+ }
+
+ /* Canonicalize the comparison to put immediate second. */
+ if (swap_commutative(NO_DEST, &op->args[1], &op->args[2])) {
+ op->args[5] = tcg_swap_cond(op->args[5]);
+ }
+ /*
+ * Canonicalize the "false" input reg to match the destination,
+ * so that the tcg backend can implement "move if true".
+ */
+ if (swap_commutative(op->args[0], &op->args[4], &op->args[3])) {
+ op->args[5] = tcg_invert_cond(op->args[5]);
+ }
+ return false;
+}
+
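
The second canonicalization relies on (cond ? t : f) == (!cond ? f : t): exchanging the two value operands is compensated by inverting the condition, which is why tcg_invert_cond is applied after the swap. A minimal sketch (the helper name cmpsel is illustrative, not QEMU API):

#include <assert.h>
#include <stdint.h>

/* Model of a select on a precomputed condition. */
static uint64_t cmpsel(int cond, uint64_t t, uint64_t f)
{
    return cond ? t : f;
}

int main(void)
{
    for (int cond = 0; cond <= 1; cond++) {
        /* Swapping t/f while inverting the condition is a no-op. */
        assert(cmpsel(cond, 111, 222) == cmpsel(!cond, 222, 111));
    }
    return 0;
}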
static bool fold_sextract(OptContext *ctx, TCGOp *op)
{
uint64_t z_mask, s_mask, s_mask_old;
@@ -2702,6 +2737,61 @@ static bool fold_xor(OptContext *ctx, TCGOp *op)
return fold_masks(ctx, op);
}
+static bool fold_bitsel_vec(OptContext *ctx, TCGOp *op)
+{
+ /* If true and false values are the same, eliminate the select. */
+ if (args_are_copies(op->args[2], op->args[3])) {
+ return tcg_opt_gen_mov(ctx, op, op->args[0], op->args[2]);
+ }
+
+ if (arg_is_const(op->args[2]) && arg_is_const(op->args[3])) {
+ uint64_t tv = arg_info(op->args[2])->val;
+ uint64_t fv = arg_info(op->args[3])->val;
+
+ if (tv == -1 && fv == 0) {
+ return tcg_opt_gen_mov(ctx, op, op->args[0], op->args[1]);
+ }
+ if (tv == 0 && fv == -1) {
+ if (TCG_TARGET_HAS_not_vec) {
+ op->opc = INDEX_op_not_vec;
+ return fold_not(ctx, op);
+ } else {
+ op->opc = INDEX_op_xor_vec;
+ op->args[2] = arg_new_constant(ctx, -1);
+ return fold_xor(ctx, op);
+ }
+ }
+ }
+ if (arg_is_const(op->args[2])) {
+ uint64_t tv = arg_info(op->args[2])->val;
+ if (tv == -1) {
+ op->opc = INDEX_op_or_vec;
+ op->args[2] = op->args[3];
+ return fold_or(ctx, op);
+ }
+ if (tv == 0 && TCG_TARGET_HAS_andc_vec) {
+ op->opc = INDEX_op_andc_vec;
+ op->args[2] = op->args[1];
+ op->args[1] = op->args[3];
+ return fold_andc(ctx, op);
+ }
+ }
+ if (arg_is_const(op->args[3])) {
+ uint64_t fv = arg_info(op->args[3])->val;
+ if (fv == 0) {
+ op->opc = INDEX_op_and_vec;
+ return fold_and(ctx, op);
+ }
+ if (fv == -1 && TCG_TARGET_HAS_orc_vec) {
+ TCGArg sel = op->args[1];
+
+ /* Fold to orc(t, a) = t | ~a: the selector becomes the second input. */
+ op->opc = INDEX_op_orc_vec;
+ op->args[1] = op->args[2];
+ op->args[2] = sel;
+ return fold_orc(ctx, op);
+ }
+ }
+ return false;
+}
+
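
All of these folds follow from the TCG definition of bitwise select, bitsel(a, t, f) = (t & a) | (f & ~a). A standalone sketch (plain C on uint64_t values; bitsel is a stand-in for the vector op, which applies the same logic lane-wise) checking each identity the function exploits:

#include <assert.h>
#include <stdint.h>

/* Bitwise select: take t where a has 1-bits, f where it has 0-bits. */
static uint64_t bitsel(uint64_t a, uint64_t t, uint64_t f)
{
    return (t & a) | (f & ~a);
}

int main(void)
{
    uint64_t a = 0xff00ff00ff00ff00ull;
    uint64_t t = 0x123456789abcdef0ull;
    uint64_t f = 0x0fedcba987654321ull;

    assert(bitsel(a, -1, 0) == a);           /* mov a           */
    assert(bitsel(a, 0, -1) == ~a);          /* not (or xor -1) */
    assert(bitsel(a, -1, f) == (a | f));     /* or              */
    assert(bitsel(a, 0, f)  == (f & ~a));    /* andc(f, a)      */
    assert(bitsel(a, t, 0)  == (a & t));     /* and             */
    assert(bitsel(a, t, -1) == (t | ~a));    /* orc(t, a)       */
    return 0;
}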
/* Propagate constants and copies, fold constant expressions. */
void tcg_optimize(TCGContext *s)
{
@@ -2923,6 +3013,15 @@ void tcg_optimize(TCGContext *s)
case INDEX_op_setcond2_i32:
done = fold_setcond2(&ctx, op);
break;
+ case INDEX_op_cmp_vec:
+ done = fold_cmp_vec(&ctx, op);
+ break;
+ case INDEX_op_cmpsel_vec:
+ done = fold_cmpsel_vec(&ctx, op);
+ break;
+ case INDEX_op_bitsel_vec:
+ done = fold_bitsel_vec(&ctx, op);
+ break;
CASE_OP_32_64(sextract):
done = fold_sextract(&ctx, op);
break;