Mirror of https://github.com/Motorhead1991/qemu.git, synced 2025-08-05 00:33:55 -06:00
target/riscv: add vector index load and store instructions
Vector indexed operations add the contents of each element of the vector offset operand specified by vs2 to the base effective address to give the effective address of each element.

Signed-off-by: LIU Zhiwei <zhiwei_liu@c-sky.com>
Reviewed-by: Alistair Francis <alistair.francis@wdc.com>
Reviewed-by: Richard Henderson <richard.henderson@linaro.org>
Message-Id: <20200701152549.1218-8-zhiwei_liu@c-sky.com>
Signed-off-by: Alistair Francis <alistair.francis@wdc.com>
This commit is contained in:
parent 751538d5da
commit f732560e35
4 changed files with 293 additions and 0 deletions
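Before the patch itself, a minimal standalone sketch of the addressing rule the commit message describes (plain C, not QEMU code; the function and parameter names, and the 32-bit signed offset type, are illustrative assumptions): effective address of element i = base + vs2[i].

#include <stdint.h>
#include <stddef.h>

/*
 * Illustrative only: compute the effective address of each of the first
 * vl elements of an indexed vector access.  The offsets array plays the
 * role of the vs2 offset operand.
 */
static void indexed_effective_addresses(uint64_t base,
                                        const int32_t *offsets, /* "vs2" */
                                        size_t vl,
                                        uint64_t *addrs)
{
    for (size_t i = 0; i < vl; i++) {
        /* each element's address = base + (sign-extended) offsets[i] */
        addrs[i] = base + (uint64_t)(int64_t)offsets[i];
    }
}

The helpers in the diff below implement the same idea per element width via the GEN_VEXT_GET_INDEX_ADDR macro.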
@@ -461,3 +461,119 @@ GEN_VEXT_ST_US(vse_v_b, int8_t, int8_t , ste_b)
GEN_VEXT_ST_US(vse_v_h, int16_t, int16_t, ste_h)
GEN_VEXT_ST_US(vse_v_w, int32_t, int32_t, ste_w)
GEN_VEXT_ST_US(vse_v_d, int64_t, int64_t, ste_d)

/*
 *** index: access vector element from indexed memory
 */
typedef target_ulong vext_get_index_addr(target_ulong base,
        uint32_t idx, void *vs2);

#define GEN_VEXT_GET_INDEX_ADDR(NAME, ETYPE, H)        \
static target_ulong NAME(target_ulong base,            \
        uint32_t idx, void *vs2)                       \
{                                                      \
    return (base + *((ETYPE *)vs2 + H(idx)));          \
}

GEN_VEXT_GET_INDEX_ADDR(idx_b, int8_t, H1)
GEN_VEXT_GET_INDEX_ADDR(idx_h, int16_t, H2)
GEN_VEXT_GET_INDEX_ADDR(idx_w, int32_t, H4)
GEN_VEXT_GET_INDEX_ADDR(idx_d, int64_t, H8)

static inline void
vext_ldst_index(void *vd, void *v0, target_ulong base,
                void *vs2, CPURISCVState *env, uint32_t desc,
                vext_get_index_addr get_index_addr,
                vext_ldst_elem_fn *ldst_elem,
                clear_fn *clear_elem,
                uint32_t esz, uint32_t msz, uintptr_t ra,
                MMUAccessType access_type)
{
    uint32_t i, k;
    uint32_t nf = vext_nf(desc);
    uint32_t vm = vext_vm(desc);
    uint32_t mlen = vext_mlen(desc);
    uint32_t vlmax = vext_maxsz(desc) / esz;

    /* probe every access*/
    for (i = 0; i < env->vl; i++) {
        if (!vm && !vext_elem_mask(v0, mlen, i)) {
            continue;
        }
        probe_pages(env, get_index_addr(base, i, vs2), nf * msz, ra,
                    access_type);
    }
    /* load bytes from guest memory */
    for (i = 0; i < env->vl; i++) {
        k = 0;
        if (!vm && !vext_elem_mask(v0, mlen, i)) {
            continue;
        }
        while (k < nf) {
            abi_ptr addr = get_index_addr(base, i, vs2) + k * msz;
            ldst_elem(env, addr, i + k * vlmax, vd, ra);
            k++;
        }
    }
    /* clear tail elements */
    if (clear_elem) {
        for (k = 0; k < nf; k++) {
            clear_elem(vd, env->vl + k * vlmax, env->vl * esz, vlmax * esz);
        }
    }
}

#define GEN_VEXT_LD_INDEX(NAME, MTYPE, ETYPE, INDEX_FN, LOAD_FN, CLEAR_FN) \
void HELPER(NAME)(void *vd, void *v0, target_ulong base,                  \
                  void *vs2, CPURISCVState *env, uint32_t desc)           \
{                                                                         \
    vext_ldst_index(vd, v0, base, vs2, env, desc, INDEX_FN,               \
                    LOAD_FN, CLEAR_FN, sizeof(ETYPE), sizeof(MTYPE),      \
                    GETPC(), MMU_DATA_LOAD);                              \
}

GEN_VEXT_LD_INDEX(vlxb_v_b, int8_t, int8_t, idx_b, ldb_b, clearb)
GEN_VEXT_LD_INDEX(vlxb_v_h, int8_t, int16_t, idx_h, ldb_h, clearh)
GEN_VEXT_LD_INDEX(vlxb_v_w, int8_t, int32_t, idx_w, ldb_w, clearl)
GEN_VEXT_LD_INDEX(vlxb_v_d, int8_t, int64_t, idx_d, ldb_d, clearq)
GEN_VEXT_LD_INDEX(vlxh_v_h, int16_t, int16_t, idx_h, ldh_h, clearh)
GEN_VEXT_LD_INDEX(vlxh_v_w, int16_t, int32_t, idx_w, ldh_w, clearl)
GEN_VEXT_LD_INDEX(vlxh_v_d, int16_t, int64_t, idx_d, ldh_d, clearq)
GEN_VEXT_LD_INDEX(vlxw_v_w, int32_t, int32_t, idx_w, ldw_w, clearl)
GEN_VEXT_LD_INDEX(vlxw_v_d, int32_t, int64_t, idx_d, ldw_d, clearq)
GEN_VEXT_LD_INDEX(vlxe_v_b, int8_t, int8_t, idx_b, lde_b, clearb)
GEN_VEXT_LD_INDEX(vlxe_v_h, int16_t, int16_t, idx_h, lde_h, clearh)
GEN_VEXT_LD_INDEX(vlxe_v_w, int32_t, int32_t, idx_w, lde_w, clearl)
GEN_VEXT_LD_INDEX(vlxe_v_d, int64_t, int64_t, idx_d, lde_d, clearq)
GEN_VEXT_LD_INDEX(vlxbu_v_b, uint8_t, uint8_t, idx_b, ldbu_b, clearb)
GEN_VEXT_LD_INDEX(vlxbu_v_h, uint8_t, uint16_t, idx_h, ldbu_h, clearh)
GEN_VEXT_LD_INDEX(vlxbu_v_w, uint8_t, uint32_t, idx_w, ldbu_w, clearl)
GEN_VEXT_LD_INDEX(vlxbu_v_d, uint8_t, uint64_t, idx_d, ldbu_d, clearq)
GEN_VEXT_LD_INDEX(vlxhu_v_h, uint16_t, uint16_t, idx_h, ldhu_h, clearh)
GEN_VEXT_LD_INDEX(vlxhu_v_w, uint16_t, uint32_t, idx_w, ldhu_w, clearl)
GEN_VEXT_LD_INDEX(vlxhu_v_d, uint16_t, uint64_t, idx_d, ldhu_d, clearq)
GEN_VEXT_LD_INDEX(vlxwu_v_w, uint32_t, uint32_t, idx_w, ldwu_w, clearl)
GEN_VEXT_LD_INDEX(vlxwu_v_d, uint32_t, uint64_t, idx_d, ldwu_d, clearq)

#define GEN_VEXT_ST_INDEX(NAME, MTYPE, ETYPE, INDEX_FN, STORE_FN)\
void HELPER(NAME)(void *vd, void *v0, target_ulong base,         \
                  void *vs2, CPURISCVState *env, uint32_t desc)  \
{                                                                 \
    vext_ldst_index(vd, v0, base, vs2, env, desc, INDEX_FN,      \
                    STORE_FN, NULL, sizeof(ETYPE), sizeof(MTYPE),\
                    GETPC(), MMU_DATA_STORE);                    \
}

GEN_VEXT_ST_INDEX(vsxb_v_b, int8_t, int8_t, idx_b, stb_b)
GEN_VEXT_ST_INDEX(vsxb_v_h, int8_t, int16_t, idx_h, stb_h)
GEN_VEXT_ST_INDEX(vsxb_v_w, int8_t, int32_t, idx_w, stb_w)
GEN_VEXT_ST_INDEX(vsxb_v_d, int8_t, int64_t, idx_d, stb_d)
GEN_VEXT_ST_INDEX(vsxh_v_h, int16_t, int16_t, idx_h, sth_h)
GEN_VEXT_ST_INDEX(vsxh_v_w, int16_t, int32_t, idx_w, sth_w)
GEN_VEXT_ST_INDEX(vsxh_v_d, int16_t, int64_t, idx_d, sth_d)
GEN_VEXT_ST_INDEX(vsxw_v_w, int32_t, int32_t, idx_w, stw_w)
GEN_VEXT_ST_INDEX(vsxw_v_d, int32_t, int64_t, idx_d, stw_d)
GEN_VEXT_ST_INDEX(vsxe_v_b, int8_t, int8_t, idx_b, ste_b)
GEN_VEXT_ST_INDEX(vsxe_v_h, int16_t, int16_t, idx_h, ste_h)
GEN_VEXT_ST_INDEX(vsxe_v_w, int32_t, int32_t, idx_w, ste_w)
GEN_VEXT_ST_INDEX(vsxe_v_d, int64_t, int64_t, idx_d, ste_d)
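As a reading aid (not part of the patch), expanding the first load instantiation above by hand, GEN_VEXT_LD_INDEX(vlxb_v_b, int8_t, int8_t, idx_b, ldb_b, clearb), gives a helper equivalent to:

void HELPER(vlxb_v_b)(void *vd, void *v0, target_ulong base,
                      void *vs2, CPURISCVState *env, uint32_t desc)
{
    /*
     * vlxb_v_b: indexed load of signed bytes (MTYPE int8_t) into byte
     * elements (ETYPE int8_t), using idx_b to read byte offsets from vs2.
     */
    vext_ldst_index(vd, v0, base, vs2, env, desc, idx_b,
                    ldb_b, clearb, sizeof(int8_t), sizeof(int8_t),
                    GETPC(), MMU_DATA_LOAD);
}

The store macro GEN_VEXT_ST_INDEX differs only in passing a store function, NULL for the clear callback (no tail clearing on stores), and MMU_DATA_STORE as the access type.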