Diffstat (limited to 'target/arm/tcg/mte_helper.c')
-rw-r--r--  target/arm/tcg/mte_helper.c  18
1 file changed, 18 insertions, 0 deletions
diff --git a/target/arm/tcg/mte_helper.c b/target/arm/tcg/mte_helper.c
index a4f3f92bc0..9c64def081 100644
--- a/target/arm/tcg/mte_helper.c
+++ b/target/arm/tcg/mte_helper.c
@@ -785,6 +785,24 @@ uint64_t mte_check(CPUARMState *env, uint32_t desc, uint64_t ptr, uintptr_t ra)
uint64_t HELPER(mte_check)(CPUARMState *env, uint32_t desc, uint64_t ptr)
{
+ /*
+ * R_XCHFJ: Alignment check not caused by memory type is priority 1,
+ * higher than any translation fault. When MTE is disabled, tcg
+ * performs the alignment check during the code generated for the
+ * memory access. With MTE enabled, we must check this here before
+ * raising any translation fault in allocation_tag_mem.
+ */
+ unsigned align = FIELD_EX32(desc, MTEDESC, ALIGN);
+ if (unlikely(align)) {
+ align = (1u << align) - 1;
+ if (unlikely(ptr & align)) {
+ int idx = FIELD_EX32(desc, MTEDESC, MIDX);
+ bool w = FIELD_EX32(desc, MTEDESC, WRITE);
+ MMUAccessType type = w ? MMU_DATA_STORE : MMU_DATA_LOAD;
+ arm_cpu_do_unaligned_access(env_cpu(env), ptr, type, idx, GETPC());
+ }
+ }
+
return mte_check(env, desc, ptr, GETPC());
}
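
For reference, the ALIGN field of the descriptor encodes the required alignment as a power-of-two exponent, so `(1u << align) - 1` produces a mask of the low address bits that must be zero for an aligned access. The following standalone sketch (illustrative only, not QEMU code; function and variable names are hypothetical) shows that mask check in isolation:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* align_exp is the power-of-two exponent taken from MTEDESC.ALIGN,
     * e.g. 3 for an 8-byte access.  Returns true if ptr is misaligned. */
    static bool is_misaligned(uint64_t ptr, unsigned align_exp)
    {
        uint64_t mask = (UINT64_C(1) << align_exp) - 1;
        return (ptr & mask) != 0;
    }

    int main(void)
    {
        printf("%d\n", is_misaligned(0x1000, 3));  /* 0: 8-byte aligned   */
        printf("%d\n", is_misaligned(0x1004, 3));  /* 1: misaligned       */
        return 0;
    }

In the helper above, a nonzero mask result leads to arm_cpu_do_unaligned_access() being raised before allocation_tag_mem() can report any translation fault, matching the priority ordering described in the comment.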