author     Peter Maydell <peter.maydell@linaro.org>    2019-06-10 16:09:19 +0100
committer  Peter Maydell <peter.maydell@linaro.org>    2019-06-10 16:09:19 +0100
commit     a578cdfbdd8f9beff5ced52b7826ddb1669abbbf (patch)
tree       90697278e6aefd0b91858c403ddb5670f6bdf053 /tcg/sparc/tcg-target.inc.c
parent     19735c837ae2056b4651720290eda59498eca65a (diff)
parent     43b3952dea0f763ceeaa2f119c473b5cc6d29c90 (diff)
Merge remote-tracking branch 'remotes/rth/tags/pull-tcg-20190610' into staging
Move softmmu tlb into CPUNegativeOffsetState

# gpg: Signature made Mon 10 Jun 2019 15:07:55 BST
# gpg:                using RSA key 7A481E78868B4DB6A85A05C064DF38E8AF7E215F
# gpg:                issuer "richard.henderson@linaro.org"
# gpg: Good signature from "Richard Henderson <richard.henderson@linaro.org>" [full]
# Primary key fingerprint: 7A48 1E78 868B 4DB6 A85A 05C0 64DF 38E8 AF7E 215F

* remotes/rth/tags/pull-tcg-20190610: (39 commits)
  tcg/arm: Remove mostly unreachable tlb special case
  tcg/arm: Use LDRD to load tlb mask+table
  tcg/aarch64: Use LDP to load tlb mask+table
  cpu: Remove CPU_COMMON
  cpu: Move the softmmu tlb to CPUNegativeOffsetState
  cpu: Move icount_decr to CPUNegativeOffsetState
  cpu: Introduce CPUNegativeOffsetState
  cpu: Introduce cpu_set_cpustate_pointers
  cpu: Move ENV_OFFSET to exec/gen-icount.h
  target/xtensa: Use env_cpu, env_archcpu
  target/unicore32: Use env_cpu, env_archcpu
  target/tricore: Use env_cpu
  target/tilegx: Use env_cpu
  target/sparc: Use env_cpu, env_archcpu
  target/sh4: Use env_cpu, env_archcpu
  target/s390x: Use env_cpu, env_archcpu
  target/riscv: Use env_cpu, env_archcpu
  target/ppc: Use env_cpu, env_archcpu
  target/openrisc: Use env_cpu, env_archcpu
  target/nios2: Use env_cpu, env_archcpu
  ...

Signed-off-by: Peter Maydell <peter.maydell@linaro.org>
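The idea behind the CPUNegativeOffsetState series, as it affects this backend, is that the per-mmu-index TLB mask/table pairs are placed immediately before the architectural env inside the CPU structure, so a TCG backend can reach them with a small negative displacement from the env pointer (TCG_AREG0). The standalone C sketch below only illustrates that layout idea; the type names, field names, and the NB_MMU_MODES value are stand-ins chosen for the example, not QEMU's actual definitions.

/* Illustrative sketch: stand-in types mirroring the idea behind
 * CPUNegativeOffsetState and CPUTLBDescFast; not QEMU's real definitions. */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define NB_MMU_MODES 4                  /* assumed value for the sketch */

typedef struct TLBFast {                /* stand-in for CPUTLBDescFast */
    uintptr_t mask;                     /* (n_entries - 1) << entry_bits */
    void *table;                        /* base of the TLB entry array */
} TLBFast;

typedef struct NegState {               /* stand-in for CPUNegativeOffsetState */
    TLBFast tlb_f[NB_MMU_MODES];
} NegState;

typedef struct Cpu {                    /* stand-in for an ArchCPU */
    NegState neg;                       /* sits immediately before env */
    struct { long regs[32]; } env;      /* stand-in for CPUArchState */
} Cpu;

/* Displacement of the fast-path pair for MMU index IDX, measured from
 * &cpu->env.  Negative, because neg precedes env in the layout above. */
#define TLB_MASK_TABLE_OFS(IDX) \
    ((int)offsetof(Cpu, neg.tlb_f[IDX]) - (int)offsetof(Cpu, env))

/* Mirrors the build-time checks the patch adds for sparc: the offset must
 * not be positive and must fit a 13-bit signed displacement (simm13). */
_Static_assert(TLB_MASK_TABLE_OFS(0) <= 0, "neg must not follow env");
_Static_assert(TLB_MASK_TABLE_OFS(0) >= -(1 << 12), "must fit in simm13");

int main(void)
{
    printf("mmu_idx 0: %d bytes from env\n", TLB_MASK_TABLE_OFS(0));
    printf("mmu_idx %d: %d bytes from env\n",
           NB_MMU_MODES - 1, TLB_MASK_TABLE_OFS(NB_MMU_MODES - 1));
    return 0;
}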
Diffstat (limited to 'tcg/sparc/tcg-target.inc.c')
-rw-r--r--  tcg/sparc/tcg-target.inc.c  40
1 file changed, 10 insertions(+), 30 deletions(-)
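In the sparc hunks below, the removed code had to handle tlb_table offsets too large for a single 13-bit signed displacement, building a temporary base register whenever check_fit_i32(table_off, 13) failed; with the negative-offset layout that fallback goes away and two plain loads from TCG_AREG0 suffice. As a hedged illustration, here is a generic signed-immediate range check in the spirit of (but not copied from) the backend's check_fit_i32():

/* Sketch only: a signed-immediate range check analogous in spirit to the
 * sparc backend's check_fit_i32(); not QEMU's actual implementation. */
#include <stdbool.h>
#include <stdint.h>

bool fits_signed_imm(int32_t val, unsigned bits)
{
    /* A value fits a `bits`-wide signed immediate iff it lies in
     * [-(1 << (bits - 1)), (1 << (bits - 1)) - 1].  For SPARC's simm13
     * (bits == 13) that range is [-4096, 4095], which is why the patch
     * can assert TLB_MASK_TABLE_OFS(0) >= -(1 << 12) at build time. */
    int32_t lo = -(1 << (bits - 1));
    int32_t hi = (1 << (bits - 1)) - 1;
    return val >= lo && val <= hi;
}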
diff --git a/tcg/sparc/tcg-target.inc.c b/tcg/sparc/tcg-target.inc.c
index 83295955a7..10b1cea63b 100644
--- a/tcg/sparc/tcg-target.inc.c
+++ b/tcg/sparc/tcg-target.inc.c
@@ -1062,6 +1062,11 @@ static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
}
#if defined(CONFIG_SOFTMMU)
+
+/* We expect to use a 13-bit negative offset from ENV. */
+QEMU_BUILD_BUG_ON(TLB_MASK_TABLE_OFS(0) > 0);
+QEMU_BUILD_BUG_ON(TLB_MASK_TABLE_OFS(0) < -(1 << 12));
+
/* Perform the TLB load and compare.
Inputs:
@@ -1075,20 +1080,12 @@ static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
The result of the TLB comparison is in %[ix]cc. The sanitized address
is in the returned register, maybe %o0. The TLB addend is in %o1. */
-/* We expect tlb_mask to be before tlb_table. */
-QEMU_BUILD_BUG_ON(offsetof(CPUArchState, tlb_table) <
- offsetof(CPUArchState, tlb_mask));
-
-/* We expect tlb_mask to be "near" tlb_table. */
-QEMU_BUILD_BUG_ON(offsetof(CPUArchState, tlb_table) -
- offsetof(CPUArchState, tlb_mask) >= (1 << 13));
-
static TCGReg tcg_out_tlb_load(TCGContext *s, TCGReg addr, int mem_index,
TCGMemOp opc, int which)
{
- int mask_off = offsetof(CPUArchState, tlb_mask[mem_index]);
- int table_off = offsetof(CPUArchState, tlb_table[mem_index]);
- TCGReg base = TCG_AREG0;
+ int fast_off = TLB_MASK_TABLE_OFS(mem_index);
+ int mask_off = fast_off + offsetof(CPUTLBDescFast, mask);
+ int table_off = fast_off + offsetof(CPUTLBDescFast, table);
const TCGReg r0 = TCG_REG_O0;
const TCGReg r1 = TCG_REG_O1;
const TCGReg r2 = TCG_REG_O2;
@@ -1096,26 +1093,9 @@ static TCGReg tcg_out_tlb_load(TCGContext *s, TCGReg addr, int mem_index,
unsigned a_bits = get_alignment_bits(opc);
tcg_target_long compare_mask;
- if (!check_fit_i32(table_off, 13)) {
- int table_hi;
-
- base = r1;
- if (table_off <= 2 * 0xfff) {
- table_hi = 0xfff;
- tcg_out_arithi(s, base, TCG_AREG0, table_hi, ARITH_ADD);
- } else {
- table_hi = table_off & ~0x3ff;
- tcg_out_sethi(s, base, table_hi);
- tcg_out_arith(s, base, TCG_AREG0, base, ARITH_ADD);
- }
- mask_off -= table_hi;
- table_off -= table_hi;
- tcg_debug_assert(check_fit_i32(mask_off, 13));
- }
-
/* Load tlb_mask[mmu_idx] and tlb_table[mmu_idx]. */
- tcg_out_ld(s, TCG_TYPE_PTR, r0, base, mask_off);
- tcg_out_ld(s, TCG_TYPE_PTR, r1, base, table_off);
+ tcg_out_ld(s, TCG_TYPE_PTR, r0, TCG_AREG0, mask_off);
+ tcg_out_ld(s, TCG_TYPE_PTR, r1, TCG_AREG0, table_off);
/* Extract the page index, shifted into place for tlb index. */
tcg_out_arithi(s, r2, addr, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS,