From 0d6e6cb779ea1615b094f35bf3189ae6fd68cd95 Mon Sep 17 00:00:00 2001
From: Alex Bennée
Date: Sat, 13 Feb 2021 13:03:11 +0000
Subject: accel/tcg/plugin-gen: fix the call signature for inline callbacks
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

A recent change to the handling of constants in TCG changed the
pattern of ops emitted for a constant add. We no longer emit a mov;
the constant is applied directly to the arguments of the add op. This
was causing SEGVs when running the insn plugin with arg=inline.

Fix this by updating copy_add_i64 to apply the constant itself, and
add a comment at the top of the append section as an aide-memoire in
case something like this happens again.

Signed-off-by: Alex Bennée
Reviewed-by: Richard Henderson
Cc: Emilio G. Cota
Message-Id: <20210213130325.14781-10-alex.bennee@linaro.org>
---
 accel/tcg/plugin-gen.c | 32 +++++++++++---------------------
 1 file changed, 11 insertions(+), 21 deletions(-)

diff --git a/accel/tcg/plugin-gen.c b/accel/tcg/plugin-gen.c
index e5dc9d0ca9..8a1bb801e0 100644
--- a/accel/tcg/plugin-gen.c
+++ b/accel/tcg/plugin-gen.c
@@ -320,22 +320,6 @@ static TCGOp *copy_const_ptr(TCGOp **begin_op, TCGOp *op, void *ptr)
     return op;
 }
 
-static TCGOp *copy_const_i64(TCGOp **begin_op, TCGOp *op, uint64_t v)
-{
-    if (TCG_TARGET_REG_BITS == 32) {
-        /* 2x mov_i32 */
-        op = copy_op(begin_op, op, INDEX_op_mov_i32);
-        op->args[1] = tcgv_i32_arg(tcg_constant_i32(v));
-        op = copy_op(begin_op, op, INDEX_op_mov_i32);
-        op->args[1] = tcgv_i32_arg(tcg_constant_i32(v >> 32));
-    } else {
-        /* mov_i64 */
-        op = copy_op(begin_op, op, INDEX_op_mov_i64);
-        op->args[1] = tcgv_i64_arg(tcg_constant_i64(v));
-    }
-    return op;
-}
-
 static TCGOp *copy_extu_tl_i64(TCGOp **begin_op, TCGOp *op)
 {
     if (TARGET_LONG_BITS == 32) {
@@ -374,14 +358,17 @@ static TCGOp *copy_st_i64(TCGOp **begin_op, TCGOp *op)
     return op;
 }
 
-static TCGOp *copy_add_i64(TCGOp **begin_op, TCGOp *op)
+static TCGOp *copy_add_i64(TCGOp **begin_op, TCGOp *op, uint64_t v)
 {
     if (TCG_TARGET_REG_BITS == 32) {
         /* all 32-bit backends must implement add2_i32 */
         g_assert(TCG_TARGET_HAS_add2_i32);
         op = copy_op(begin_op, op, INDEX_op_add2_i32);
+        op->args[4] = tcgv_i32_arg(tcg_constant_i32(v));
+        op->args[5] = tcgv_i32_arg(tcg_constant_i32(v >> 32));
     } else {
         op = copy_op(begin_op, op, INDEX_op_add_i64);
+        op->args[2] = tcgv_i64_arg(tcg_constant_i64(v));
     }
     return op;
 }
@@ -431,6 +418,12 @@ static TCGOp *copy_call(TCGOp **begin_op, TCGOp *op, void *empty_func,
     return op;
 }
 
+/*
+ * When we append/replace ops here we are sensitive to changing patterns of
+ * TCGOps generated by the tcg_gen_FOO calls when we generated the
+ * empty callbacks. This will assert very quickly in a debug build as
+ * we assert the ops we are replacing are the correct ones.
+ */
 static TCGOp *append_udata_cb(const struct qemu_plugin_dyn_cb *cb,
                               TCGOp *begin_op, TCGOp *op, int *cb_idx)
 {
@@ -462,11 +455,8 @@ static TCGOp *append_inline_cb(const struct qemu_plugin_dyn_cb *cb,
     /* ld_i64 */
     op = copy_ld_i64(&begin_op, op);
 
-    /* const_i64 */
-    op = copy_const_i64(&begin_op, op, cb->inline_insn.imm);
-
     /* add_i64 */
-    op = copy_add_i64(&begin_op, op);
+    op = copy_add_i64(&begin_op, op, cb->inline_insn.imm);
 
     /* st_i64 */
     op = copy_st_i64(&begin_op, op);
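
For reference, the path this patch fixes is reached by any plugin that
registers an inline arithmetic callback. Below is a minimal sketch of
such a plugin, modelled on tests/plugin/insn.c with arg=inline. It is
illustrative only: it assumes the plugin API as it stood around this
commit, and the insn_count / vcpu_tb_trans / plugin_exit names are ours,
not taken from the QEMU tree.

/*
 * Minimal sketch of a plugin exercising the inline-add path patched
 * above. Modelled on tests/plugin/insn.c; names are illustrative.
 */
#include <inttypes.h>
#include <stdio.h>
#include <qemu-plugin.h>

QEMU_PLUGIN_EXPORT int qemu_plugin_version = QEMU_PLUGIN_VERSION;

/* Bumped inline by generated TCG ops: ld_i64 / add_i64 / st_i64 */
static uint64_t insn_count;

static void vcpu_tb_trans(qemu_plugin_id_t id, struct qemu_plugin_tb *tb)
{
    size_t n = qemu_plugin_tb_n_insns(tb);

    for (size_t i = 0; i < n; i++) {
        struct qemu_plugin_insn *insn = qemu_plugin_tb_get_insn(tb, i);
        /*
         * insn_count += 1 for every executed insn. The immediate (1)
         * becomes cb->inline_insn.imm, which copy_add_i64 now folds
         * straight into the add op's arguments.
         */
        qemu_plugin_register_vcpu_insn_exec_inline(
            insn, QEMU_PLUGIN_INLINE_ADD_U64, &insn_count, 1);
    }
}

static void plugin_exit(qemu_plugin_id_t id, void *p)
{
    fprintf(stderr, "insns: %" PRIu64 "\n", insn_count);
}

QEMU_PLUGIN_EXPORT int qemu_plugin_install(qemu_plugin_id_t id,
                                           const qemu_info_t *info,
                                           int argc, char **argv)
{
    qemu_plugin_register_vcpu_tb_trans_cb(id, vcpu_tb_trans);
    qemu_plugin_register_atexit_cb(id, plugin_exit, NULL);
    return 0;
}

Loaded with something like -plugin tests/plugin/libinsn.so,arg=inline,
a tree without this patch would SEGV as soon as append_inline_cb tried
to patch the now-missing mov op; with the patch, the immediate is
applied directly to the add op and the counter is updated inline with
no helper call.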