tcg/loongarch64: Rationalize args to tcg_out_qemu_{ld,st}

Interpret the variable argument placement in the caller.  Shift some
code around slightly to share more between softmmu and user-only.

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
This commit is contained in:
Richard Henderson 2023-04-08 15:13:04 -07:00
parent 1df6d611bd
commit 7f67e58236

View file

@@ -1049,39 +1049,31 @@ static void tcg_out_qemu_ld_indexed(TCGContext *s, TCGReg rd, TCGReg rj,
} }
} }
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, TCGType type) static void tcg_out_qemu_ld(TCGContext *s, TCGReg data_reg, TCGReg addr_reg,
MemOpIdx oi, TCGType data_type)
{ {
TCGReg addr_regl; MemOp opc = get_memop(oi);
TCGReg data_regl; TCGReg base, index;
MemOpIdx oi;
MemOp opc; #ifdef CONFIG_SOFTMMU
#if defined(CONFIG_SOFTMMU)
tcg_insn_unit *label_ptr[1]; tcg_insn_unit *label_ptr[1];
#else
unsigned a_bits;
#endif
TCGReg base;
data_regl = *args++; tcg_out_tlb_load(s, addr_reg, oi, label_ptr, 1);
addr_regl = *args++; index = TCG_REG_TMP2;
oi = *args++;
opc = get_memop(oi);
#if defined(CONFIG_SOFTMMU)
tcg_out_tlb_load(s, addr_regl, oi, label_ptr, 1);
base = tcg_out_zext_addr_if_32_bit(s, addr_regl, TCG_REG_TMP0);
tcg_out_qemu_ld_indexed(s, data_regl, base, TCG_REG_TMP2, opc, type);
add_qemu_ldst_label(s, 1, oi, type,
data_regl, addr_regl,
s->code_ptr, label_ptr);
#else #else
a_bits = get_alignment_bits(opc); unsigned a_bits = get_alignment_bits(opc);
if (a_bits) { if (a_bits) {
tcg_out_test_alignment(s, true, addr_regl, a_bits); tcg_out_test_alignment(s, true, addr_reg, a_bits);
} }
base = tcg_out_zext_addr_if_32_bit(s, addr_regl, TCG_REG_TMP0); index = USE_GUEST_BASE ? TCG_GUEST_BASE_REG : TCG_REG_ZERO;
TCGReg guest_base_reg = USE_GUEST_BASE ? TCG_GUEST_BASE_REG : TCG_REG_ZERO; #endif
tcg_out_qemu_ld_indexed(s, data_regl, base, guest_base_reg, opc, type);
base = tcg_out_zext_addr_if_32_bit(s, addr_reg, TCG_REG_TMP0);
tcg_out_qemu_ld_indexed(s, data_reg, base, index, opc, data_type);
#ifdef CONFIG_SOFTMMU
add_qemu_ldst_label(s, true, oi, data_type, data_reg, addr_reg,
s->code_ptr, label_ptr);
#endif #endif
} }
@@ -1109,39 +1101,31 @@ static void tcg_out_qemu_st_indexed(TCGContext *s, TCGReg data,
} }
} }
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, TCGType type) static void tcg_out_qemu_st(TCGContext *s, TCGReg data_reg, TCGReg addr_reg,
MemOpIdx oi, TCGType data_type)
{ {
TCGReg addr_regl; MemOp opc = get_memop(oi);
TCGReg data_regl; TCGReg base, index;
MemOpIdx oi;
MemOp opc; #ifdef CONFIG_SOFTMMU
#if defined(CONFIG_SOFTMMU)
tcg_insn_unit *label_ptr[1]; tcg_insn_unit *label_ptr[1];
#else
unsigned a_bits;
#endif
TCGReg base;
data_regl = *args++; tcg_out_tlb_load(s, addr_reg, oi, label_ptr, 0);
addr_regl = *args++; index = TCG_REG_TMP2;
oi = *args++;
opc = get_memop(oi);
#if defined(CONFIG_SOFTMMU)
tcg_out_tlb_load(s, addr_regl, oi, label_ptr, 0);
base = tcg_out_zext_addr_if_32_bit(s, addr_regl, TCG_REG_TMP0);
tcg_out_qemu_st_indexed(s, data_regl, base, TCG_REG_TMP2, opc);
add_qemu_ldst_label(s, 0, oi, type,
data_regl, addr_regl,
s->code_ptr, label_ptr);
#else #else
a_bits = get_alignment_bits(opc); unsigned a_bits = get_alignment_bits(opc);
if (a_bits) { if (a_bits) {
tcg_out_test_alignment(s, false, addr_regl, a_bits); tcg_out_test_alignment(s, false, addr_reg, a_bits);
} }
base = tcg_out_zext_addr_if_32_bit(s, addr_regl, TCG_REG_TMP0); index = USE_GUEST_BASE ? TCG_GUEST_BASE_REG : TCG_REG_ZERO;
TCGReg guest_base_reg = USE_GUEST_BASE ? TCG_GUEST_BASE_REG : TCG_REG_ZERO; #endif
tcg_out_qemu_st_indexed(s, data_regl, base, guest_base_reg, opc);
base = tcg_out_zext_addr_if_32_bit(s, addr_reg, TCG_REG_TMP0);
tcg_out_qemu_st_indexed(s, data_reg, base, index, opc);
#ifdef CONFIG_SOFTMMU
add_qemu_ldst_label(s, false, oi, data_type, data_reg, addr_reg,
s->code_ptr, label_ptr);
#endif #endif
} }
@@ -1564,16 +1548,16 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
break; break;
case INDEX_op_qemu_ld_i32: case INDEX_op_qemu_ld_i32:
tcg_out_qemu_ld(s, args, TCG_TYPE_I32); tcg_out_qemu_ld(s, a0, a1, a2, TCG_TYPE_I32);
break; break;
case INDEX_op_qemu_ld_i64: case INDEX_op_qemu_ld_i64:
tcg_out_qemu_ld(s, args, TCG_TYPE_I64); tcg_out_qemu_ld(s, a0, a1, a2, TCG_TYPE_I64);
break; break;
case INDEX_op_qemu_st_i32: case INDEX_op_qemu_st_i32:
tcg_out_qemu_st(s, args, TCG_TYPE_I32); tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I32);
break; break;
case INDEX_op_qemu_st_i64: case INDEX_op_qemu_st_i64:
tcg_out_qemu_st(s, args, TCG_TYPE_I64); tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I64);
break; break;
case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */ case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */