author	Frank Chang <frank.chang@sifive.com>	2021-12-10 15:56:07 +0800
committer	Alistair Francis <alistair.francis@wdc.com>	2021-12-20 14:51:36 +1000
commit	08b9d0ed4aaff095a940280eba1321e3414dd5ac (patch)
tree	62612512e673facfba13475625dd7112d5b7e2d3 /target/riscv/vector_helper.c
parent	79556fb6fa067922fb11d2a1209852900109c7ae (diff)

target/riscv: rvv-1.0: index load and store instructions

Signed-off-by: Frank Chang <frank.chang@sifive.com>
Reviewed-by: Alistair Francis <alistair.francis@wdc.com>
Message-Id: <20211210075704.23951-22-frank.chang@sifive.com>
Signed-off-by: Alistair Francis <alistair.francis@wdc.com>
Diffstat (limited to 'target/riscv/vector_helper.c')
-rw-r--r--	target/riscv/vector_helper.c	98
1 file changed, 42 insertions, 56 deletions
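
Background for the diff below: the rvv-1.0 spec names the indexed load/store helpers after the index EEW (vlxei8/16/32/64, vsxei8/16/32/64), the data element width is given by SEW, and each index element supplies an unscaled byte offset added to the base address. Because the memory element width always equals the data element width, the separate MTYPE/msz parameter is no longer needed and a single esz suffices. The standalone sketch below (not QEMU code; all function and variable names are made up for illustration) mimics the resulting address arithmetic for a vlxei16.v access with SEW=32, with a bounds check standing in for QEMU's probe_pages() step:

#include <assert.h>
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* vlxei16.v with SEW=32: 16-bit index elements, 32-bit data elements */
static void sketch_vlxei16_32(uint32_t *vd, const uint16_t *vs2,
                              const uint8_t *base, size_t base_len,
                              uint32_t vl)
{
    const uint32_t esz = sizeof(uint32_t);   /* data element width (SEW=32) */

    for (uint32_t i = 0; i < vl; i++) {
        /* the 16-bit index element is an unscaled byte offset */
        size_t offset = vs2[i];
        assert(offset + esz <= base_len);    /* stand-in for probe_pages() */

        uint32_t data;
        memcpy(&data, base + offset, esz);   /* one element per index */
        vd[i] = data;
    }
}

int main(void)
{
    uint8_t mem[64];
    for (unsigned i = 0; i < sizeof(mem); i++) {
        mem[i] = (uint8_t)i;
    }

    const uint16_t idx[4] = { 0, 8, 16, 24 };   /* byte offsets into mem */
    uint32_t dst[4] = { 0 };

    sketch_vlxei16_32(dst, idx, mem, sizeof(mem), 4);

    for (int i = 0; i < 4; i++) {
        printf("dst[%d] = 0x%08" PRIx32 "\n", i, dst[i]);
    }
    return 0;
}

Running the sketch loads the four 32-bit words found at byte offsets 0, 8, 16 and 24 of the buffer; a segment access would repeat the same element load nf times, advancing the address by esz each time, which matches the k * esz term in the loop below.
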
diff --git a/target/riscv/vector_helper.c b/target/riscv/vector_helper.c
index 9141292994..df45c1620c 100644
--- a/target/riscv/vector_helper.c
+++ b/target/riscv/vector_helper.c
@@ -217,15 +217,6 @@ static void NAME(CPURISCVState *env, abi_ptr addr, \
cpu_##STSUF##_data_ra(env, addr, data, retaddr); \
}
-GEN_VEXT_ST_ELEM(stb_b, int8_t, H1, stb)
-GEN_VEXT_ST_ELEM(stb_h, int16_t, H2, stb)
-GEN_VEXT_ST_ELEM(stb_w, int32_t, H4, stb)
-GEN_VEXT_ST_ELEM(stb_d, int64_t, H8, stb)
-GEN_VEXT_ST_ELEM(sth_h, int16_t, H2, stw)
-GEN_VEXT_ST_ELEM(sth_w, int32_t, H4, stw)
-GEN_VEXT_ST_ELEM(sth_d, int64_t, H8, stw)
-GEN_VEXT_ST_ELEM(stw_w, int32_t, H4, stl)
-GEN_VEXT_ST_ELEM(stw_d, int64_t, H8, stl)
GEN_VEXT_ST_ELEM(ste_b, int8_t, H1, stb)
GEN_VEXT_ST_ELEM(ste_h, int16_t, H2, stw)
GEN_VEXT_ST_ELEM(ste_w, int32_t, H4, stl)
@@ -393,8 +384,7 @@ vext_ldst_index(void *vd, void *v0, target_ulong base,
void *vs2, CPURISCVState *env, uint32_t desc,
vext_get_index_addr get_index_addr,
vext_ldst_elem_fn *ldst_elem,
- uint32_t esz, uint32_t msz, uintptr_t ra,
- MMUAccessType access_type)
+ uint32_t esz, uintptr_t ra, MMUAccessType access_type)
{
uint32_t i, k;
uint32_t nf = vext_nf(desc);
@@ -406,7 +396,7 @@ vext_ldst_index(void *vd, void *v0, target_ulong base,
if (!vm && !vext_elem_mask(v0, i)) {
continue;
}
- probe_pages(env, get_index_addr(base, i, vs2), nf * msz, ra,
+ probe_pages(env, get_index_addr(base, i, vs2), nf * esz, ra,
access_type);
}
/* load bytes from guest memory */
@@ -416,67 +406,63 @@ vext_ldst_index(void *vd, void *v0, target_ulong base,
continue;
}
while (k < nf) {
- abi_ptr addr = get_index_addr(base, i, vs2) + k * msz;
+ abi_ptr addr = get_index_addr(base, i, vs2) + k * esz;
ldst_elem(env, addr, i + k * vlmax, vd, ra);
k++;
}
}
}
-#define GEN_VEXT_LD_INDEX(NAME, MTYPE, ETYPE, INDEX_FN, LOAD_FN) \
+#define GEN_VEXT_LD_INDEX(NAME, ETYPE, INDEX_FN, LOAD_FN) \
void HELPER(NAME)(void *vd, void *v0, target_ulong base, \
void *vs2, CPURISCVState *env, uint32_t desc) \
{ \
vext_ldst_index(vd, v0, base, vs2, env, desc, INDEX_FN, \
- LOAD_FN, sizeof(ETYPE), sizeof(MTYPE), \
- GETPC(), MMU_DATA_LOAD); \
-}
-
-GEN_VEXT_LD_INDEX(vlxb_v_b, int8_t, int8_t, idx_b, ldb_b)
-GEN_VEXT_LD_INDEX(vlxb_v_h, int8_t, int16_t, idx_h, ldb_h)
-GEN_VEXT_LD_INDEX(vlxb_v_w, int8_t, int32_t, idx_w, ldb_w)
-GEN_VEXT_LD_INDEX(vlxb_v_d, int8_t, int64_t, idx_d, ldb_d)
-GEN_VEXT_LD_INDEX(vlxh_v_h, int16_t, int16_t, idx_h, ldh_h)
-GEN_VEXT_LD_INDEX(vlxh_v_w, int16_t, int32_t, idx_w, ldh_w)
-GEN_VEXT_LD_INDEX(vlxh_v_d, int16_t, int64_t, idx_d, ldh_d)
-GEN_VEXT_LD_INDEX(vlxw_v_w, int32_t, int32_t, idx_w, ldw_w)
-GEN_VEXT_LD_INDEX(vlxw_v_d, int32_t, int64_t, idx_d, ldw_d)
-GEN_VEXT_LD_INDEX(vlxe_v_b, int8_t, int8_t, idx_b, lde_b)
-GEN_VEXT_LD_INDEX(vlxe_v_h, int16_t, int16_t, idx_h, lde_h)
-GEN_VEXT_LD_INDEX(vlxe_v_w, int32_t, int32_t, idx_w, lde_w)
-GEN_VEXT_LD_INDEX(vlxe_v_d, int64_t, int64_t, idx_d, lde_d)
-GEN_VEXT_LD_INDEX(vlxbu_v_b, uint8_t, uint8_t, idx_b, ldbu_b)
-GEN_VEXT_LD_INDEX(vlxbu_v_h, uint8_t, uint16_t, idx_h, ldbu_h)
-GEN_VEXT_LD_INDEX(vlxbu_v_w, uint8_t, uint32_t, idx_w, ldbu_w)
-GEN_VEXT_LD_INDEX(vlxbu_v_d, uint8_t, uint64_t, idx_d, ldbu_d)
-GEN_VEXT_LD_INDEX(vlxhu_v_h, uint16_t, uint16_t, idx_h, ldhu_h)
-GEN_VEXT_LD_INDEX(vlxhu_v_w, uint16_t, uint32_t, idx_w, ldhu_w)
-GEN_VEXT_LD_INDEX(vlxhu_v_d, uint16_t, uint64_t, idx_d, ldhu_d)
-GEN_VEXT_LD_INDEX(vlxwu_v_w, uint32_t, uint32_t, idx_w, ldwu_w)
-GEN_VEXT_LD_INDEX(vlxwu_v_d, uint32_t, uint64_t, idx_d, ldwu_d)
-
-#define GEN_VEXT_ST_INDEX(NAME, MTYPE, ETYPE, INDEX_FN, STORE_FN)\
+ LOAD_FN, sizeof(ETYPE), GETPC(), MMU_DATA_LOAD); \
+}
+
+GEN_VEXT_LD_INDEX(vlxei8_8_v, int8_t, idx_b, lde_b)
+GEN_VEXT_LD_INDEX(vlxei8_16_v, int16_t, idx_b, lde_h)
+GEN_VEXT_LD_INDEX(vlxei8_32_v, int32_t, idx_b, lde_w)
+GEN_VEXT_LD_INDEX(vlxei8_64_v, int64_t, idx_b, lde_d)
+GEN_VEXT_LD_INDEX(vlxei16_8_v, int8_t, idx_h, lde_b)
+GEN_VEXT_LD_INDEX(vlxei16_16_v, int16_t, idx_h, lde_h)
+GEN_VEXT_LD_INDEX(vlxei16_32_v, int32_t, idx_h, lde_w)
+GEN_VEXT_LD_INDEX(vlxei16_64_v, int64_t, idx_h, lde_d)
+GEN_VEXT_LD_INDEX(vlxei32_8_v, int8_t, idx_w, lde_b)
+GEN_VEXT_LD_INDEX(vlxei32_16_v, int16_t, idx_w, lde_h)
+GEN_VEXT_LD_INDEX(vlxei32_32_v, int32_t, idx_w, lde_w)
+GEN_VEXT_LD_INDEX(vlxei32_64_v, int64_t, idx_w, lde_d)
+GEN_VEXT_LD_INDEX(vlxei64_8_v, int8_t, idx_d, lde_b)
+GEN_VEXT_LD_INDEX(vlxei64_16_v, int16_t, idx_d, lde_h)
+GEN_VEXT_LD_INDEX(vlxei64_32_v, int32_t, idx_d, lde_w)
+GEN_VEXT_LD_INDEX(vlxei64_64_v, int64_t, idx_d, lde_d)
+
+#define GEN_VEXT_ST_INDEX(NAME, ETYPE, INDEX_FN, STORE_FN) \
void HELPER(NAME)(void *vd, void *v0, target_ulong base, \
void *vs2, CPURISCVState *env, uint32_t desc) \
{ \
vext_ldst_index(vd, v0, base, vs2, env, desc, INDEX_FN, \
- STORE_FN, sizeof(ETYPE), sizeof(MTYPE), \
+ STORE_FN, sizeof(ETYPE), \
GETPC(), MMU_DATA_STORE); \
}
-GEN_VEXT_ST_INDEX(vsxb_v_b, int8_t, int8_t, idx_b, stb_b)
-GEN_VEXT_ST_INDEX(vsxb_v_h, int8_t, int16_t, idx_h, stb_h)
-GEN_VEXT_ST_INDEX(vsxb_v_w, int8_t, int32_t, idx_w, stb_w)
-GEN_VEXT_ST_INDEX(vsxb_v_d, int8_t, int64_t, idx_d, stb_d)
-GEN_VEXT_ST_INDEX(vsxh_v_h, int16_t, int16_t, idx_h, sth_h)
-GEN_VEXT_ST_INDEX(vsxh_v_w, int16_t, int32_t, idx_w, sth_w)
-GEN_VEXT_ST_INDEX(vsxh_v_d, int16_t, int64_t, idx_d, sth_d)
-GEN_VEXT_ST_INDEX(vsxw_v_w, int32_t, int32_t, idx_w, stw_w)
-GEN_VEXT_ST_INDEX(vsxw_v_d, int32_t, int64_t, idx_d, stw_d)
-GEN_VEXT_ST_INDEX(vsxe_v_b, int8_t, int8_t, idx_b, ste_b)
-GEN_VEXT_ST_INDEX(vsxe_v_h, int16_t, int16_t, idx_h, ste_h)
-GEN_VEXT_ST_INDEX(vsxe_v_w, int32_t, int32_t, idx_w, ste_w)
-GEN_VEXT_ST_INDEX(vsxe_v_d, int64_t, int64_t, idx_d, ste_d)
+GEN_VEXT_ST_INDEX(vsxei8_8_v, int8_t, idx_b, ste_b)
+GEN_VEXT_ST_INDEX(vsxei8_16_v, int16_t, idx_b, ste_h)
+GEN_VEXT_ST_INDEX(vsxei8_32_v, int32_t, idx_b, ste_w)
+GEN_VEXT_ST_INDEX(vsxei8_64_v, int64_t, idx_b, ste_d)
+GEN_VEXT_ST_INDEX(vsxei16_8_v, int8_t, idx_h, ste_b)
+GEN_VEXT_ST_INDEX(vsxei16_16_v, int16_t, idx_h, ste_h)
+GEN_VEXT_ST_INDEX(vsxei16_32_v, int32_t, idx_h, ste_w)
+GEN_VEXT_ST_INDEX(vsxei16_64_v, int64_t, idx_h, ste_d)
+GEN_VEXT_ST_INDEX(vsxei32_8_v, int8_t, idx_w, ste_b)
+GEN_VEXT_ST_INDEX(vsxei32_16_v, int16_t, idx_w, ste_h)
+GEN_VEXT_ST_INDEX(vsxei32_32_v, int32_t, idx_w, ste_w)
+GEN_VEXT_ST_INDEX(vsxei32_64_v, int64_t, idx_w, ste_d)
+GEN_VEXT_ST_INDEX(vsxei64_8_v, int8_t, idx_d, ste_b)
+GEN_VEXT_ST_INDEX(vsxei64_16_v, int16_t, idx_d, ste_h)
+GEN_VEXT_ST_INDEX(vsxei64_32_v, int32_t, idx_d, ste_w)
+GEN_VEXT_ST_INDEX(vsxei64_64_v, int64_t, idx_d, ste_d)
/*
 *** unit-stride fault-only-first load instructions