author    Richard Henderson <richard.henderson@linaro.org>  2020-07-23 17:28:05 -0700
committer Alistair Francis <alistair.francis@wdc.com>       2020-08-21 22:37:55 -0700
commit    ffe70e4dfc9cf2a6934e674b81b69c847b403c4b
tree      b0b0d81063b280aa70f784b49e0e4d48d7b7dfc6 /target/riscv/insn_trans
parent    00e925c56074f8c4923a087e2eecea8a3315ea40
target/riscv: Check nanboxed inputs in trans_rvf.inc.c
If a 32-bit input is not properly nanboxed, then the input is replaced
with the default qnan. The only inline expansion is for the
sign-changing set of instructions: FSGNJ.S, FSGNJX.S, FSGNJN.S.

Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Reviewed-by: LIU Zhiwei <zhiwei_liu@c-sky.com>
Message-Id: <20200724002807.441147-6-richard.henderson@linaro.org>
Signed-off-by: Alistair Francis <alistair.francis@wdc.com>
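At the value level, the check described above behaves like the following minimal C sketch (illustrative only; the helper name is hypothetical and the actual TCG expansion in QEMU is not shown here). A single-precision value stored in a 64-bit FP register is validly nanboxed only when bits 63:32 are all ones; anything else is replaced by the default qnan (0x7fc00000), itself nanboxed:

#include <stdint.h>

/*
 * Sketch of the nanbox check, not the actual QEMU helper.
 * A 32-bit float held in a 64-bit FP register is valid only
 * when bits 63:32 are all ones; otherwise substitute the
 * default qnan.
 */
static uint64_t check_nanbox_s(uint64_t in)
{
    /* in >= 0xffffffff00000000 iff bits 63:32 are all ones. */
    if (in >= 0xffffffff00000000ull) {
        return in;
    }
    return 0xffffffff00000000ull | 0x7fc00000; /* nanboxed default qnan */
}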
Diffstat (limited to 'target/riscv/insn_trans')
-rw-r--r--  target/riscv/insn_trans/trans_rvf.c.inc | 71 +++++++++++++++++++++++++++++++++++++++-----------
1 file changed, 55 insertions(+), 16 deletions(-)
diff --git a/target/riscv/insn_trans/trans_rvf.c.inc b/target/riscv/insn_trans/trans_rvf.c.inc
index 264d3139f1..832f01db6f 100644
--- a/target/riscv/insn_trans/trans_rvf.c.inc
+++ b/target/riscv/insn_trans/trans_rvf.c.inc
@@ -161,47 +161,86 @@ static bool trans_fsgnj_s(DisasContext *ctx, arg_fsgnj_s *a)
 {
     REQUIRE_FPU;
     REQUIRE_EXT(ctx, RVF);
+
     if (a->rs1 == a->rs2) { /* FMOV */
-        tcg_gen_mov_i64(cpu_fpr[a->rd], cpu_fpr[a->rs1]);
+        gen_check_nanbox_s(cpu_fpr[a->rd], cpu_fpr[a->rs1]);
     } else { /* FSGNJ */
-        tcg_gen_deposit_i64(cpu_fpr[a->rd], cpu_fpr[a->rs2], cpu_fpr[a->rs1],
-                            0, 31);
+        TCGv_i64 rs1 = tcg_temp_new_i64();
+        TCGv_i64 rs2 = tcg_temp_new_i64();
+
+        gen_check_nanbox_s(rs1, cpu_fpr[a->rs1]);
+        gen_check_nanbox_s(rs2, cpu_fpr[a->rs2]);
+
+        /* This formulation retains the nanboxing of rs2. */
+        tcg_gen_deposit_i64(cpu_fpr[a->rd], rs2, rs1, 0, 31);
+        tcg_temp_free_i64(rs1);
+        tcg_temp_free_i64(rs2);
     }
-    gen_nanbox_s(cpu_fpr[a->rd], cpu_fpr[a->rd]);
     mark_fs_dirty(ctx);
     return true;
 }
 
 static bool trans_fsgnjn_s(DisasContext *ctx, arg_fsgnjn_s *a)
 {
+    TCGv_i64 rs1, rs2, mask;
+
     REQUIRE_FPU;
     REQUIRE_EXT(ctx, RVF);
+
+    rs1 = tcg_temp_new_i64();
+    gen_check_nanbox_s(rs1, cpu_fpr[a->rs1]);
+
     if (a->rs1 == a->rs2) { /* FNEG */
-        tcg_gen_xori_i64(cpu_fpr[a->rd], cpu_fpr[a->rs1], INT32_MIN);
+        tcg_gen_xori_i64(cpu_fpr[a->rd], rs1, MAKE_64BIT_MASK(31, 1));
     } else {
-        TCGv_i64 t0 = tcg_temp_new_i64();
-        tcg_gen_not_i64(t0, cpu_fpr[a->rs2]);
-        tcg_gen_deposit_i64(cpu_fpr[a->rd], t0, cpu_fpr[a->rs1], 0, 31);
-        tcg_temp_free_i64(t0);
+        rs2 = tcg_temp_new_i64();
+        gen_check_nanbox_s(rs2, cpu_fpr[a->rs2]);
+
+        /*
+         * Replace bit 31 in rs1 with inverse in rs2.
+         * This formulation retains the nanboxing of rs1.
+         */
+        mask = tcg_const_i64(~MAKE_64BIT_MASK(31, 1));
+        tcg_gen_nor_i64(rs2, rs2, mask);
+        tcg_gen_and_i64(rs1, mask, rs1);
+        tcg_gen_or_i64(cpu_fpr[a->rd], rs1, rs2);
+
+        tcg_temp_free_i64(mask);
+        tcg_temp_free_i64(rs2);
     }
-    gen_nanbox_s(cpu_fpr[a->rd], cpu_fpr[a->rd]);
+    tcg_temp_free_i64(rs1);
+
     mark_fs_dirty(ctx);
     return true;
 }
 
 static bool trans_fsgnjx_s(DisasContext *ctx, arg_fsgnjx_s *a)
 {
+    TCGv_i64 rs1, rs2;
+
     REQUIRE_FPU;
     REQUIRE_EXT(ctx, RVF);
+
+    rs1 = tcg_temp_new_i64();
+    gen_check_nanbox_s(rs1, cpu_fpr[a->rs1]);
+
     if (a->rs1 == a->rs2) { /* FABS */
-        tcg_gen_andi_i64(cpu_fpr[a->rd], cpu_fpr[a->rs1], ~INT32_MIN);
+        tcg_gen_andi_i64(cpu_fpr[a->rd], rs1, ~MAKE_64BIT_MASK(31, 1));
     } else {
-        TCGv_i64 t0 = tcg_temp_new_i64();
-        tcg_gen_andi_i64(t0, cpu_fpr[a->rs2], INT32_MIN);
-        tcg_gen_xor_i64(cpu_fpr[a->rd], cpu_fpr[a->rs1], t0);
-        tcg_temp_free_i64(t0);
+        rs2 = tcg_temp_new_i64();
+        gen_check_nanbox_s(rs2, cpu_fpr[a->rs2]);
+
+        /*
+         * Xor bit 31 in rs1 with that in rs2.
+         * This formulation retains the nanboxing of rs1.
+         */
+        tcg_gen_andi_i64(rs2, rs2, MAKE_64BIT_MASK(31, 1));
+        tcg_gen_xor_i64(cpu_fpr[a->rd], rs1, rs2);
+
+        tcg_temp_free_i64(rs2);
     }
-    gen_nanbox_s(cpu_fpr[a->rd], cpu_fpr[a->rd]);
+    tcg_temp_free_i64(rs1);
+
     mark_fs_dirty(ctx);
     return true;
 }
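
For reference, once both inputs have been nanbox-checked (bits 63:32 all ones), the three inline expansions above reduce to the bit manipulations in this value-level C sketch (function names are illustrative, not QEMU APIs):

#include <stdint.h>

#define SIGN_BIT  (1ull << 31)   /* MAKE_64BIT_MASK(31, 1) */

/* FSGNJ.S: magnitude of rs1, sign of rs2.  The deposit keeps
 * rs2's nanbox bits 63:32 along with its sign bit. */
static uint64_t fsgnj_s(uint64_t rs1, uint64_t rs2)
{
    return (rs2 & ~(SIGN_BIT - 1)) | (rs1 & (SIGN_BIT - 1));
}

/* FSGNJN.S: magnitude of rs1, inverted sign of rs2.  The
 * nor/and/or sequence keeps rs1's nanbox bits. */
static uint64_t fsgnjn_s(uint64_t rs1, uint64_t rs2)
{
    return (rs1 & ~SIGN_BIT) | (~rs2 & SIGN_BIT);
}

/* FSGNJX.S: sign of rs1 xored with sign of rs2.  Bits 63:32 of
 * rs1 are untouched, so its nanbox survives. */
static uint64_t fsgnjx_s(uint64_t rs1, uint64_t rs2)
{
    return rs1 ^ (rs2 & SIGN_BIT);
}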