author     Peter Maydell <peter.maydell@linaro.org>   2021-03-08 20:07:37 +0000
committer  Peter Maydell <peter.maydell@linaro.org>   2021-03-08 20:07:37 +0000
commit     74fd46ed44f60a230804dc1050bae76eb9420ecb (patch)
tree       413bc134d651099728aa85bf15f0f79f1655b406 /tcg/tci
parent     229a834518b950d56fd1bc94923276504d0ee9d4 (diff)
parent     6cc9d67c6f682cf04eea2d6e64a252b63a7eccdf (diff)
Merge remote-tracking branch 'remotes/rth-gitlab/tags/pull-tcg-20210306' into staging
TCI build fix and cleanup
Streamline tb_lookup
Fixes for tcg/aarch64
# gpg: Signature made Sat 06 Mar 2021 21:34:46 GMT
# gpg: using RSA key 7A481E78868B4DB6A85A05C064DF38E8AF7E215F
# gpg: issuer "richard.henderson@linaro.org"
# gpg: Good signature from "Richard Henderson <richard.henderson@linaro.org>" [full]
# Primary key fingerprint: 7A48 1E78 868B 4DB6 A85A 05C0 64DF 38E8 AF7E 215F
* remotes/rth-gitlab/tags/pull-tcg-20210306: (27 commits)
accel/tcg: Precompute curr_cflags into cpu->tcg_cflags
include/exec: lightly re-arrange TranslationBlock
accel/tcg: drop the use of CF_HASH_MASK and rename params
accel/tcg: move CF_CLUSTER calculation to curr_cflags
accel/tcg: rename tb_lookup__cpu_state and hoist state extraction
tcg/tci: Merge mov, not and neg operations
tcg/tci: Merge bswap operations
tcg/tci: Merge extension operations
tcg/tci: Merge basic arithmetic operations
tcg/tci: Reduce use of tci_read_r64
tcg/tci: Remove tci_read_r32s
tcg/tci: Remove tci_read_r32
tcg/tci: Remove tci_read_r16s
tcg/tci: Remove tci_read_r16
tcg/tci: Remove tci_read_r8s
tcg/tci: Remove tci_read_r8
tcg/tci: Merge identical cases in generation (load/store opcodes)
tcg/tci: Merge identical cases in generation (conditional opcodes)
tcg/tci: Merge identical cases in generation (deposit opcode)
tcg/tci: Merge identical cases in generation (exchange opcodes)
...
Signed-off-by: Peter Maydell <peter.maydell@linaro.org>
Diffstat (limited to 'tcg/tci')
-rw-r--r--  tcg/tci/tcg-target.c.inc | 204
1 file changed, 73 insertions(+), 131 deletions(-)
diff --git a/tcg/tci/tcg-target.c.inc b/tcg/tci/tcg-target.c.inc
index feac4659cc..c79f9c32d8 100644
--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc
@@ -380,6 +380,18 @@ static inline void tcg_out_call(TCGContext *s, const tcg_insn_unit *arg)
     old_code_ptr[1] = s->code_ptr - old_code_ptr;
 }
 
+#if TCG_TARGET_REG_BITS == 64
+# define CASE_32_64(x) \
+        case glue(glue(INDEX_op_, x), _i64): \
+        case glue(glue(INDEX_op_, x), _i32):
+# define CASE_64(x) \
+        case glue(glue(INDEX_op_, x), _i64):
+#else
+# define CASE_32_64(x) \
+        case glue(glue(INDEX_op_, x), _i32):
+# define CASE_64(x)
+#endif
+
 static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                        const int *const_args)
 {
@@ -391,6 +403,7 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
     case INDEX_op_exit_tb:
         tcg_out64(s, args[0]);
         break;
+
     case INDEX_op_goto_tb:
         if (s->tb_jmp_insn_offset) {
             /* Direct jump method. */
@@ -404,15 +417,18 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
         }
         set_jmp_reset_offset(s, args[0]);
         break;
+
     case INDEX_op_br:
         tci_out_label(s, arg_label(args[0]));
         break;
-    case INDEX_op_setcond_i32:
+
+    CASE_32_64(setcond)
         tcg_out_r(s, args[0]);
         tcg_out_r(s, args[1]);
         tcg_out_r(s, args[2]);
         tcg_out8(s, args[3]);   /* condition */
         break;
+
 #if TCG_TARGET_REG_BITS == 32
     case INDEX_op_setcond2_i32:
         /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
@@ -423,95 +439,54 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
         tcg_out_r(s, args[4]);
         tcg_out8(s, args[5]);   /* condition */
         break;
-#elif TCG_TARGET_REG_BITS == 64
-    case INDEX_op_setcond_i64:
-        tcg_out_r(s, args[0]);
-        tcg_out_r(s, args[1]);
-        tcg_out_r(s, args[2]);
-        tcg_out8(s, args[3]);   /* condition */
-        break;
 #endif
-    case INDEX_op_ld8u_i32:
-    case INDEX_op_ld8s_i32:
-    case INDEX_op_ld16u_i32:
-    case INDEX_op_ld16s_i32:
+
+    CASE_32_64(ld8u)
+    CASE_32_64(ld8s)
+    CASE_32_64(ld16u)
+    CASE_32_64(ld16s)
     case INDEX_op_ld_i32:
-    case INDEX_op_st8_i32:
-    case INDEX_op_st16_i32:
+    CASE_64(ld32u)
+    CASE_64(ld32s)
+    CASE_64(ld)
+    CASE_32_64(st8)
+    CASE_32_64(st16)
     case INDEX_op_st_i32:
-    case INDEX_op_ld8u_i64:
-    case INDEX_op_ld8s_i64:
-    case INDEX_op_ld16u_i64:
-    case INDEX_op_ld16s_i64:
-    case INDEX_op_ld32u_i64:
-    case INDEX_op_ld32s_i64:
-    case INDEX_op_ld_i64:
-    case INDEX_op_st8_i64:
-    case INDEX_op_st16_i64:
-    case INDEX_op_st32_i64:
-    case INDEX_op_st_i64:
+    CASE_64(st32)
+    CASE_64(st)
         stack_bounds_check(args[1], args[2]);
         tcg_out_r(s, args[0]);
         tcg_out_r(s, args[1]);
         tcg_debug_assert(args[2] == (int32_t)args[2]);
         tcg_out32(s, args[2]);
         break;
-    case INDEX_op_add_i32:
-    case INDEX_op_sub_i32:
-    case INDEX_op_mul_i32:
-    case INDEX_op_and_i32:
-    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
-    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
-    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
-    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
-    case INDEX_op_or_i32:
-    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
-    case INDEX_op_xor_i32:
-    case INDEX_op_shl_i32:
-    case INDEX_op_shr_i32:
-    case INDEX_op_sar_i32:
-    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
-    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
-        tcg_out_r(s, args[0]);
-        tcg_out_r(s, args[1]);
-        tcg_out_r(s, args[2]);
-        break;
-    case INDEX_op_deposit_i32:  /* Optional (TCG_TARGET_HAS_deposit_i32). */
-        tcg_out_r(s, args[0]);
-        tcg_out_r(s, args[1]);
-        tcg_out_r(s, args[2]);
-        tcg_debug_assert(args[3] <= UINT8_MAX);
-        tcg_out8(s, args[3]);
-        tcg_debug_assert(args[4] <= UINT8_MAX);
-        tcg_out8(s, args[4]);
-        break;
-#if TCG_TARGET_REG_BITS == 64
-    case INDEX_op_add_i64:
-    case INDEX_op_sub_i64:
-    case INDEX_op_mul_i64:
-    case INDEX_op_and_i64:
-    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
-    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
-    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
-    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
-    case INDEX_op_or_i64:
-    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
-    case INDEX_op_xor_i64:
-    case INDEX_op_shl_i64:
-    case INDEX_op_shr_i64:
-    case INDEX_op_sar_i64:
-    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
-    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
-    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
-    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
-    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
-    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
+    CASE_32_64(add)
+    CASE_32_64(sub)
+    CASE_32_64(mul)
+    CASE_32_64(and)
+    CASE_32_64(or)
+    CASE_32_64(xor)
+    CASE_32_64(andc)    /* Optional (TCG_TARGET_HAS_andc_*). */
+    CASE_32_64(orc)     /* Optional (TCG_TARGET_HAS_orc_*). */
+    CASE_32_64(eqv)     /* Optional (TCG_TARGET_HAS_eqv_*). */
+    CASE_32_64(nand)    /* Optional (TCG_TARGET_HAS_nand_*). */
+    CASE_32_64(nor)     /* Optional (TCG_TARGET_HAS_nor_*). */
+    CASE_32_64(shl)
+    CASE_32_64(shr)
+    CASE_32_64(sar)
+    CASE_32_64(rotl)    /* Optional (TCG_TARGET_HAS_rot_*). */
+    CASE_32_64(rotr)    /* Optional (TCG_TARGET_HAS_rot_*). */
+    CASE_32_64(div)     /* Optional (TCG_TARGET_HAS_div_*). */
+    CASE_32_64(divu)    /* Optional (TCG_TARGET_HAS_div_*). */
+    CASE_32_64(rem)     /* Optional (TCG_TARGET_HAS_div_*). */
+    CASE_32_64(remu)    /* Optional (TCG_TARGET_HAS_div_*). */
        tcg_out_r(s, args[0]);
         tcg_out_r(s, args[1]);
         tcg_out_r(s, args[2]);
         break;
-    case INDEX_op_deposit_i64:  /* Optional (TCG_TARGET_HAS_deposit_i64). */
+
+    CASE_32_64(deposit) /* Optional (TCG_TARGET_HAS_deposit_*). */
         tcg_out_r(s, args[0]);
         tcg_out_r(s, args[1]);
         tcg_out_r(s, args[2]);
@@ -520,45 +495,31 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
         tcg_debug_assert(args[4] <= UINT8_MAX);
         tcg_out8(s, args[4]);
         break;
-    case INDEX_op_brcond_i64:
+
+    CASE_32_64(brcond)
         tcg_out_r(s, args[0]);
         tcg_out_r(s, args[1]);
         tcg_out8(s, args[2]);   /* condition */
         tci_out_label(s, arg_label(args[3]));
         break;
-    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
-    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
-    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
-    case INDEX_op_not_i64:     /* Optional (TCG_TARGET_HAS_not_i64). */
-    case INDEX_op_neg_i64:     /* Optional (TCG_TARGET_HAS_neg_i64). */
-    case INDEX_op_ext8s_i64:   /* Optional (TCG_TARGET_HAS_ext8s_i64). */
-    case INDEX_op_ext8u_i64:   /* Optional (TCG_TARGET_HAS_ext8u_i64). */
-    case INDEX_op_ext16s_i64:  /* Optional (TCG_TARGET_HAS_ext16s_i64). */
-    case INDEX_op_ext16u_i64:  /* Optional (TCG_TARGET_HAS_ext16u_i64). */
-    case INDEX_op_ext32s_i64:  /* Optional (TCG_TARGET_HAS_ext32s_i64). */
-    case INDEX_op_ext32u_i64:  /* Optional (TCG_TARGET_HAS_ext32u_i64). */
-    case INDEX_op_ext_i32_i64:
-    case INDEX_op_extu_i32_i64:
-#endif /* TCG_TARGET_REG_BITS == 64 */
-    case INDEX_op_neg_i32:     /* Optional (TCG_TARGET_HAS_neg_i32). */
-    case INDEX_op_not_i32:     /* Optional (TCG_TARGET_HAS_not_i32). */
-    case INDEX_op_ext8s_i32:   /* Optional (TCG_TARGET_HAS_ext8s_i32). */
-    case INDEX_op_ext16s_i32:  /* Optional (TCG_TARGET_HAS_ext16s_i32). */
-    case INDEX_op_ext8u_i32:   /* Optional (TCG_TARGET_HAS_ext8u_i32). */
-    case INDEX_op_ext16u_i32:  /* Optional (TCG_TARGET_HAS_ext16u_i32). */
-    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
-    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
-        tcg_out_r(s, args[0]);
-        tcg_out_r(s, args[1]);
-        break;
-    case INDEX_op_div_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
-    case INDEX_op_divu_i32:    /* Optional (TCG_TARGET_HAS_div_i32). */
-    case INDEX_op_rem_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
-    case INDEX_op_remu_i32:    /* Optional (TCG_TARGET_HAS_div_i32). */
+
+    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
+    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
+    CASE_32_64(ext8s)    /* Optional (TCG_TARGET_HAS_ext8s_*). */
+    CASE_32_64(ext8u)    /* Optional (TCG_TARGET_HAS_ext8u_*). */
+    CASE_32_64(ext16s)   /* Optional (TCG_TARGET_HAS_ext16s_*). */
+    CASE_32_64(ext16u)   /* Optional (TCG_TARGET_HAS_ext16u_*). */
+    CASE_64(ext32s)      /* Optional (TCG_TARGET_HAS_ext32s_i64). */
+    CASE_64(ext32u)      /* Optional (TCG_TARGET_HAS_ext32u_i64). */
+    CASE_64(ext_i32)
+    CASE_64(extu_i32)
+    CASE_32_64(bswap16)  /* Optional (TCG_TARGET_HAS_bswap16_*). */
+    CASE_32_64(bswap32)  /* Optional (TCG_TARGET_HAS_bswap32_*). */
+    CASE_64(bswap64)     /* Optional (TCG_TARGET_HAS_bswap64_i64). */
         tcg_out_r(s, args[0]);
         tcg_out_r(s, args[1]);
-        tcg_out_r(s, args[2]);
         break;
+
 #if TCG_TARGET_REG_BITS == 32
     case INDEX_op_add2_i32:
     case INDEX_op_sub2_i32:
@@ -584,31 +545,8 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
         tcg_out_r(s, args[3]);
         break;
 #endif
-    case INDEX_op_brcond_i32:
-        tcg_out_r(s, args[0]);
-        tcg_out_r(s, args[1]);
-        tcg_out8(s, args[2]);   /* condition */
-        tci_out_label(s, arg_label(args[3]));
-        break;
+
     case INDEX_op_qemu_ld_i32:
-        tcg_out_r(s, *args++);
-        tcg_out_r(s, *args++);
-        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
-            tcg_out_r(s, *args++);
-        }
-        tcg_out_i(s, *args++);
-        break;
-    case INDEX_op_qemu_ld_i64:
-        tcg_out_r(s, *args++);
-        if (TCG_TARGET_REG_BITS == 32) {
-            tcg_out_r(s, *args++);
-        }
-        tcg_out_r(s, *args++);
-        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
-            tcg_out_r(s, *args++);
-        }
-        tcg_out_i(s, *args++);
-        break;
     case INDEX_op_qemu_st_i32:
         tcg_out_r(s, *args++);
         tcg_out_r(s, *args++);
@@ -617,6 +555,8 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
         }
         tcg_out_i(s, *args++);
         break;
+
+    case INDEX_op_qemu_ld_i64:
     case INDEX_op_qemu_st_i64:
         tcg_out_r(s, *args++);
         if (TCG_TARGET_REG_BITS == 32) {
@@ -628,8 +568,10 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
         }
         tcg_out_i(s, *args++);
         break;
+
     case INDEX_op_mb:
         break;
+
     case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov. */
     case INDEX_op_mov_i64:
     case INDEX_op_call:     /* Always emitted via tcg_out_call. */
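The CASE_32_64()/CASE_64() macros added at the top of the diff are what let the later hunks collapse each pair of duplicated _i32/_i64 case arms into one. Below is a minimal, self-contained sketch of the same token-pasting pattern; the glue() helper is written out inline here (QEMU defines its own in a shared header), and the enum values are stand-ins for illustration, not QEMU's real TCGOpcode list.

/* Sketch of the CASE_32_64 pattern, compilable standalone. */
#include <stdio.h>

#define xglue(x, y) x ## y          /* paste two tokens */
#define glue(x, y)  xglue(x, y)     /* expand arguments, then paste */

#define TCG_TARGET_REG_BITS 64      /* pretend the host is 64-bit */

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
#endif

/* Stand-in opcode values; QEMU keeps the real list elsewhere. */
enum {
    INDEX_op_add_i32,
    INDEX_op_add_i64,
    INDEX_op_sub_i32,
    INDEX_op_sub_i64,
};

static const char *op_name(int opc)
{
    switch (opc) {
    CASE_32_64(add)     /* expands to both add_i64: and add_i32: labels */
        return "add";
    CASE_32_64(sub)
        return "sub";
    default:
        return "other";
    }
}

int main(void)
{
    /* Both widths of an opcode reach the same case body. */
    printf("%s %s\n", op_name(INDEX_op_add_i32), op_name(INDEX_op_add_i64));
    return 0;
}

On a 32-bit build the _i64 label simply drops out of the expansion, so one case body serves whichever operand widths the build actually provides, which is exactly how the diff removes the duplicated 32-bit and 64-bit arms above.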