author     Peter Maydell <peter.maydell@linaro.org>   2015-09-03 13:05:45 +0100
committer  Peter Maydell <peter.maydell@linaro.org>   2015-09-03 13:05:45 +0100
commit     561578c2a82292ddf55737791d2838b797f49f35 (patch)
tree       5eecb3e2d93d127d65f586304e44a9a380e1fb45
parent     fc8135a46d095f865d285e697a874f617bfeeb90 (diff)
parent     08b0b23be639b955e5e3d84dc42fa4e5ce6a8728 (diff)
Merge remote-tracking branch 'remotes/rth/tags/pull-tcg-20150902' into staging
queued tcg patches

# gpg: Signature made Wed 02 Sep 2015 22:35:37 BST using RSA key ID 4DD0279B
# gpg: Good signature from "Richard Henderson <rth7680@gmail.com>"
# gpg:                 aka "Richard Henderson <rth@redhat.com>"
# gpg:                 aka "Richard Henderson <rth@twiddle.net>"

* remotes/rth/tags/pull-tcg-20150902:
  tcg/i386: omit a few REXW prefixes in softmmu code
  tcg/aarch64: Fix tcg_out_qemu_{ld, st} for guest_base == 0

Signed-off-by: Peter Maydell <peter.maydell@linaro.org>
-rw-r--r--  tcg/aarch64/tcg-target.c  27
-rw-r--r--  tcg/i386/tcg-target.c     15
2 files changed, 29 insertions(+), 13 deletions(-)
diff --git a/tcg/aarch64/tcg-target.c b/tcg/aarch64/tcg-target.c
index 01ae610cd7..0ed10a9741 100644
--- a/tcg/aarch64/tcg-target.c
+++ b/tcg/aarch64/tcg-target.c
@@ -56,6 +56,11 @@ static const int tcg_target_call_oarg_regs[1] = {
#define TCG_REG_TMP TCG_REG_X30
#ifndef CONFIG_SOFTMMU
+/* Note that XZR cannot be encoded in the address base register slot,
+ as that actually encodes SP. So if we need to zero-extend the guest
+ address, via the address index register slot, we need to load even
+ a zero guest base into a register. */
+#define USE_GUEST_BASE (guest_base != 0 || TARGET_LONG_BITS == 32)
#define TCG_REG_GUEST_BASE TCG_REG_X28
#endif
@@ -1224,9 +1229,13 @@ static void tcg_out_qemu_ld(TCGContext *s, TCGReg data_reg, TCGReg addr_reg,
add_qemu_ldst_label(s, true, oi, ext, data_reg, addr_reg,
s->code_ptr, label_ptr);
#else /* !CONFIG_SOFTMMU */
- tcg_out_qemu_ld_direct(s, memop, ext, data_reg,
- guest_base ? TCG_REG_GUEST_BASE : TCG_REG_XZR,
- otype, addr_reg);
+ if (USE_GUEST_BASE) {
+ tcg_out_qemu_ld_direct(s, memop, ext, data_reg,
+ TCG_REG_GUEST_BASE, otype, addr_reg);
+ } else {
+ tcg_out_qemu_ld_direct(s, memop, ext, data_reg,
+ addr_reg, TCG_TYPE_I64, TCG_REG_XZR);
+ }
#endif /* CONFIG_SOFTMMU */
}
@@ -1245,9 +1254,13 @@ static void tcg_out_qemu_st(TCGContext *s, TCGReg data_reg, TCGReg addr_reg,
add_qemu_ldst_label(s, false, oi, (memop & MO_SIZE) == MO_64,
data_reg, addr_reg, s->code_ptr, label_ptr);
#else /* !CONFIG_SOFTMMU */
- tcg_out_qemu_st_direct(s, memop, data_reg,
- guest_base ? TCG_REG_GUEST_BASE : TCG_REG_XZR,
- otype, addr_reg);
+ if (USE_GUEST_BASE) {
+ tcg_out_qemu_st_direct(s, memop, data_reg,
+ TCG_REG_GUEST_BASE, otype, addr_reg);
+ } else {
+ tcg_out_qemu_st_direct(s, memop, data_reg,
+ addr_reg, TCG_TYPE_I64, TCG_REG_XZR);
+ }
#endif /* CONFIG_SOFTMMU */
}
@@ -1806,7 +1819,7 @@ static void tcg_target_qemu_prologue(TCGContext *s)
CPU_TEMP_BUF_NLONGS * sizeof(long));
#if !defined(CONFIG_SOFTMMU)
- if (guest_base) {
+ if (USE_GUEST_BASE) {
tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_GUEST_BASE, guest_base);
tcg_regset_set_reg(s->reserved_regs, TCG_REG_GUEST_BASE);
}
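
The aarch64 change above boils down to a single predicate: a real base register is needed whenever guest_base is non-zero, or whenever a 32-bit guest address has to be zero-extended through the index register slot, because register 31 in the base slot encodes SP rather than XZR. The following is a minimal stand-alone C sketch of that decision, for illustration only; use_guest_base() and the printed addressing forms are hypothetical helpers, not QEMU code.

/* Illustrative sketch (not QEMU API) of the USE_GUEST_BASE decision. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Mirrors the new USE_GUEST_BASE condition: a register is required either
 * because guest_base is non-zero, or because a 32-bit guest address must be
 * zero-extended via the index slot, so the base slot cannot hold XZR. */
static bool use_guest_base(uint64_t guest_base, int target_long_bits)
{
    return guest_base != 0 || target_long_bits == 32;
}

int main(void)
{
    struct { uint64_t base; int bits; } cases[] = {
        { 0,       64 },  /* 64-bit guest, zero base: XZR as the *index* is fine */
        { 0,       32 },  /* 32-bit guest: base register still required          */
        { 0x10000, 64 },  /* non-zero guest base always needs the register       */
    };
    for (size_t i = 0; i < sizeof(cases) / sizeof(cases[0]); i++) {
        printf("guest_base=%#llx, TARGET_LONG_BITS=%d -> %s\n",
               (unsigned long long)cases[i].base, cases[i].bits,
               use_guest_base(cases[i].base, cases[i].bits)
                   ? "ld/st via [TCG_REG_GUEST_BASE, addr_reg]"
                   : "ld/st via [addr_reg, XZR]");
    }
    return 0;
}
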
diff --git a/tcg/i386/tcg-target.c b/tcg/i386/tcg-target.c
index d2adbc4d17..9187d34caf 100644
--- a/tcg/i386/tcg-target.c
+++ b/tcg/i386/tcg-target.c
@@ -1178,8 +1178,8 @@ static inline void tcg_out_tlb_load(TCGContext *s, TCGReg addrlo, TCGReg addrhi,
const TCGReg r0 = TCG_REG_L0;
const TCGReg r1 = TCG_REG_L1;
TCGType ttype = TCG_TYPE_I32;
- TCGType htype = TCG_TYPE_I32;
- int trexw = 0, hrexw = 0;
+ TCGType tlbtype = TCG_TYPE_I32;
+ int trexw = 0, hrexw = 0, tlbrexw = 0;
int s_mask = (1 << (opc & MO_SIZE)) - 1;
bool aligned = (opc & MO_AMASK) == MO_ALIGN || s_mask == 0;
@@ -1189,12 +1189,15 @@ static inline void tcg_out_tlb_load(TCGContext *s, TCGReg addrlo, TCGReg addrhi,
trexw = P_REXW;
}
if (TCG_TYPE_PTR == TCG_TYPE_I64) {
- htype = TCG_TYPE_I64;
hrexw = P_REXW;
+ if (TARGET_PAGE_BITS + CPU_TLB_BITS > 32) {
+ tlbtype = TCG_TYPE_I64;
+ tlbrexw = P_REXW;
+ }
}
}
- tcg_out_mov(s, htype, r0, addrlo);
+ tcg_out_mov(s, tlbtype, r0, addrlo);
if (aligned) {
tcg_out_mov(s, ttype, r1, addrlo);
} else {
@@ -1203,12 +1206,12 @@ static inline void tcg_out_tlb_load(TCGContext *s, TCGReg addrlo, TCGReg addrhi,
tcg_out_modrm_offset(s, OPC_LEA + trexw, r1, addrlo, s_mask);
}
- tcg_out_shifti(s, SHIFT_SHR + hrexw, r0,
+ tcg_out_shifti(s, SHIFT_SHR + tlbrexw, r0,
TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
tgen_arithi(s, ARITH_AND + trexw, r1,
TARGET_PAGE_MASK | (aligned ? s_mask : 0), 0);
- tgen_arithi(s, ARITH_AND + hrexw, r0,
+ tgen_arithi(s, ARITH_AND + tlbrexw, r0,
(CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS, 0);
tcg_out_modrm_sib_offset(s, OPC_LEA + hrexw, r0, TCG_AREG0, r0, 0,
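
The i386 change rests on a width argument: the TLB slot offset depends only on address bits below TARGET_PAGE_BITS + CPU_TLB_BITS, so when that sum is at most 32 the mov/shr/and sequence can run at 32-bit operand size and the REXW prefixes can be dropped. The following stand-alone C sketch checks that equivalence; the constants are example values and tlb_offset_64()/tlb_offset_32() are illustrative helpers, not QEMU code.

/* Illustrative check (not QEMU code) that 32-bit arithmetic suffices for the
 * TLB index when TARGET_PAGE_BITS + CPU_TLB_BITS <= 32. */
#include <stdint.h>
#include <stdio.h>

#define TARGET_PAGE_BITS   12            /* example: 4 KiB pages       */
#define CPU_TLB_BITS       8             /* example: 256-entry TLB     */
#define CPU_TLB_ENTRY_BITS 5             /* example: 32-byte TLB entry */
#define CPU_TLB_SIZE       (1 << CPU_TLB_BITS)

/* Same two steps as tcg_out_tlb_load, done at full 64-bit width. */
static uint64_t tlb_offset_64(uint64_t addr)
{
    return (addr >> (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS))
           & ((uint64_t)(CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
}

/* Same computation at 32-bit width, as emitted without the REXW prefix. */
static uint32_t tlb_offset_32(uint64_t addr)
{
    uint32_t a = (uint32_t)addr;         /* 32-bit mov discards the high half */
    return (a >> (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS))
           & ((CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
}

int main(void)
{
    uint64_t addrs[] = { 0, 0xffffffffull, 0x0123456789abcdefull, UINT64_MAX };
    for (size_t i = 0; i < sizeof(addrs) / sizeof(addrs[0]); i++) {
        printf("addr=%#018llx  64-bit=%#llx  32-bit=%#x  %s\n",
               (unsigned long long)addrs[i],
               (unsigned long long)tlb_offset_64(addrs[i]),
               tlb_offset_32(addrs[i]),
               tlb_offset_64(addrs[i]) == tlb_offset_32(addrs[i])
                   ? "match" : "MISMATCH");
    }
    return 0;
}
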