tcg/loongarch64: Use tcg_use_softmmu

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Author: Richard Henderson <richard.henderson@linaro.org>
Date:   2023-10-01 18:05:27 +0000
Parent: 915e1d52e2
Commit: 10e1fd2784

View file

@@ -165,10 +165,7 @@ static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
     return TCG_REG_A0 + slot;
 }
 
-#ifndef CONFIG_SOFTMMU
-#define USE_GUEST_BASE     (guest_base != 0)
 #define TCG_GUEST_BASE_REG TCG_REG_S1
-#endif
 
 #define TCG_CT_CONST_ZERO  0x100
 #define TCG_CT_CONST_S12   0x200
@@ -908,7 +905,7 @@ static TCGLabelQemuLdst *prepare_host_addr(TCGContext *s, HostAddress *h,
     h->aa = atom_and_align_for_opc(s, opc, MO_ATOM_IFALIGN, false);
     a_bits = h->aa.align;
 
-#ifdef CONFIG_SOFTMMU
+    if (tcg_use_softmmu) {
         unsigned s_bits = opc & MO_SIZE;
         int mem_index = get_mmuidx(oi);
         int fast_ofs = tlb_mask_table_ofs(s, mem_index);
@@ -937,9 +934,10 @@ static TCGLabelQemuLdst *prepare_host_addr(TCGContext *s, HostAddress *h,
                    offsetof(CPUTLBEntry, addend));
 
         /*
-         * For aligned accesses, we check the first byte and include the alignment
-         * bits within the address. For unaligned access, we check that we don't
-         * cross pages using the address of the last byte of the access.
+         * For aligned accesses, we check the first byte and include the
+         * alignment bits within the address. For unaligned access, we
+         * check that we don't cross pages using the address of the last
+         * byte of the access.
         */
         if (a_bits < s_bits) {
             unsigned a_mask = (1u << a_bits) - 1;
@@ -956,7 +954,7 @@ static TCGLabelQemuLdst *prepare_host_addr(TCGContext *s, HostAddress *h,
         tcg_out_opc_bne(s, TCG_REG_TMP0, TCG_REG_TMP1, 0);
 
         h->index = TCG_REG_TMP2;
-#else
+    } else {
         if (a_bits) {
             ldst = new_ldst_label(s);
@@ -976,8 +974,8 @@ static TCGLabelQemuLdst *prepare_host_addr(TCGContext *s, HostAddress *h,
             tcg_out_opc_bne(s, TCG_REG_TMP1, TCG_REG_ZERO, 0);
         }
 
-        h->index = USE_GUEST_BASE ? TCG_GUEST_BASE_REG : TCG_REG_ZERO;
-#endif
+        h->index = guest_base ? TCG_GUEST_BASE_REG : TCG_REG_ZERO;
+    }
 
     if (addr_type == TCG_TYPE_I32) {
         h->base = TCG_REG_TMP0;
@@ -2272,12 +2270,10 @@ static void tcg_target_qemu_prologue(TCGContext *s)
                    TCG_REG_SP, SAVE_OFS + i * REG_SIZE);
     }
 
-#if !defined(CONFIG_SOFTMMU)
-    if (USE_GUEST_BASE) {
+    if (!tcg_use_softmmu && guest_base) {
         tcg_out_movi(s, TCG_TYPE_PTR, TCG_GUEST_BASE_REG, guest_base);
         tcg_regset_set_reg(s->reserved_regs, TCG_GUEST_BASE_REG);
     }
-#endif
 
     /* Call generated code */
     tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);