Mirror of https://github.com/Motorhead1991/qemu.git, synced 2025-08-18 23:52:14 -06:00
tcg: Formalize tcg_out_mb
Most tcg backends already have a function for this; the rest can
split one out from tcg_out_op. Call it directly from tcg_gen_code.

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Reviewed-by: Pierrick Bouvier <pierrick.bouvier@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
parent: f2b1708e80
commit: e038696c92

11 changed files with 28 additions and 51 deletions
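The change follows one pattern across the backends: the memory-barrier opcode is no longer routed through each backend's generic tcg_out_op() switch; instead, tcg_gen_code() calls a dedicated tcg_out_mb() hook directly. The snippet below is a minimal, self-contained C sketch of that dispatch pattern, not QEMU code; the names (Opcode, backend_out_mb, backend_out_op, gen_code) are illustrative assumptions.

/* Illustrative sketch only, not QEMU code: a barrier op dispatched
 * through a dedicated hook instead of the generic per-op switch. */
#include <stdio.h>

typedef enum { OP_ADD, OP_MB } Opcode;

/* Per-backend barrier hook, analogous to tcg_out_mb(s, a0). */
static void backend_out_mb(unsigned flags)
{
    printf("emit memory barrier, flags=0x%x\n", flags);
}

/* Generic per-opcode emitter, analogous to tcg_out_op(). */
static void backend_out_op(Opcode opc, unsigned arg)
{
    switch (opc) {
    case OP_ADD:
        printf("emit add, arg=%u\n", arg);
        break;
    default:
        /* OP_MB no longer reaches this switch. */
        break;
    }
}

/* Analogous to the opcode loop in tcg_gen_code(): barriers get their own case. */
static void gen_code(const Opcode *ops, const unsigned *args, int n)
{
    for (int i = 0; i < n; i++) {
        switch (ops[i]) {
        case OP_MB:
            backend_out_mb(args[i]);    /* direct call, bypassing backend_out_op() */
            break;
        default:
            backend_out_op(ops[i], args[i]);
            break;
        }
    }
}

int main(void)
{
    Opcode ops[] = { OP_ADD, OP_MB };
    unsigned args[] = { 1, 0x0f };
    gen_code(ops, args, 2);
    return 0;
}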
@@ -1575,7 +1575,7 @@ static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rn)
     tcg_out_mov(s, TCG_TYPE_I32, rd, rn);
 }
 
-static inline void tcg_out_mb(TCGContext *s, TCGArg a0)
+static void tcg_out_mb(TCGContext *s, unsigned a0)
 {
     static const uint32_t sync[] = {
         [0 ... TCG_MO_ALL] = DMB_ISH | DMB_LD | DMB_ST,
@@ -2845,10 +2845,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType ext,
         tcg_out_qemu_ldst_i128(s, a0, a1, a2, args[3], false);
         break;
 
-    case INDEX_op_mb:
-        tcg_out_mb(s, a0);
-        break;
-
     case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
     case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
     case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
@@ -1203,7 +1203,7 @@ static void tcg_out_goto_label(TCGContext *s, ARMCond cond, TCGLabel *l)
     }
 }
 
-static void tcg_out_mb(TCGContext *s, TCGArg a0)
+static void tcg_out_mb(TCGContext *s, unsigned a0)
 {
     if (use_armv7_instructions) {
         tcg_out32(s, INSN_DMB_ISH);
@@ -2565,10 +2565,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
         tcg_out_qemu_st(s, args[0], args[1], args[2], args[3], TCG_TYPE_I64);
         break;
 
-    case INDEX_op_mb:
-        tcg_out_mb(s, args[0]);
-        break;
-
     case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
     case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
     case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
@@ -1168,7 +1168,7 @@ static inline void tcg_out_pushi(TCGContext *s, tcg_target_long val)
     }
 }
 
-static inline void tcg_out_mb(TCGContext *s, TCGArg a0)
+static void tcg_out_mb(TCGContext *s, unsigned a0)
 {
     /* Given the strength of x86 memory ordering, we only need care for
        store-load ordering.  Experimentally, "lock orl $0,0(%esp)" is
@@ -3536,9 +3536,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
         break;
 #endif
 
-    case INDEX_op_mb:
-        tcg_out_mb(s, a0);
-        break;
     case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
     case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
     case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
@@ -301,7 +301,7 @@ static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
  * TCG intrinsics
  */
 
-static void tcg_out_mb(TCGContext *s, TCGArg a0)
+static void tcg_out_mb(TCGContext *s, unsigned a0)
 {
     /* Baseline LoongArch only has the full barrier, unfortunately.  */
     tcg_out_opc_dbar(s, 0);
@@ -1917,10 +1917,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
     TCGArg a3 = args[3];
 
     switch (opc) {
-    case INDEX_op_mb:
-        tcg_out_mb(s, a0);
-        break;
-
     case INDEX_op_goto_ptr:
         tcg_out_opc_jirl(s, TCG_REG_ZERO, a0, 0);
         break;
@@ -1491,7 +1491,7 @@ static void tcg_out_qemu_st(TCGContext *s, TCGReg datalo, TCGReg datahi,
     }
 }
 
-static void tcg_out_mb(TCGContext *s, TCGArg a0)
+static void tcg_out_mb(TCGContext *s, unsigned a0)
 {
     static const MIPSInsn sync[] = {
         /* Note that SYNC_MB is a slightly weaker than SYNC 0,
@@ -2352,9 +2352,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
         }
         break;
 
-    case INDEX_op_mb:
-        tcg_out_mb(s, a0);
-        break;
     case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
     case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
     case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
@@ -2208,7 +2208,7 @@ static const TCGOutOpBrcond2 outop_brcond2 = {
     .out = tgen_brcond2,
 };
 
-static void tcg_out_mb(TCGContext *s, TCGArg a0)
+static void tcg_out_mb(TCGContext *s, unsigned a0)
 {
     uint32_t insn;
 
@@ -3758,10 +3758,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
         tcg_out_qemu_ldst_i128(s, args[0], args[1], args[2], args[3], false);
         break;
 
-    case INDEX_op_mb:
-        tcg_out_mb(s, args[0]);
-        break;
-
     case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
     case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
     case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
@@ -1582,7 +1582,7 @@ static void tcg_out_call(TCGContext *s, const tcg_insn_unit *arg,
     tcg_out_call_int(s, arg, false);
 }
 
-static void tcg_out_mb(TCGContext *s, TCGArg a0)
+static void tcg_out_mb(TCGContext *s, unsigned a0)
 {
     tcg_insn_unit insn = OPC_FENCE;
 
|
@ -2594,10 +2594,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
|
||||||
tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I64);
|
tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I64);
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case INDEX_op_mb:
|
|
||||||
tcg_out_mb(s, a0);
|
|
||||||
break;
|
|
||||||
|
|
||||||
case INDEX_op_call: /* Always emitted via tcg_out_call. */
|
case INDEX_op_call: /* Always emitted via tcg_out_call. */
|
||||||
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
|
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
|
||||||
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
|
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
|
||||||
|
|
|
@@ -3008,6 +3008,17 @@ static const TCGOutOpUnary outop_not = {
     .out_rr = tgen_not,
 };
 
+static void tcg_out_mb(TCGContext *s, unsigned a0)
+{
+    /*
+     * The host memory model is quite strong, we simply need to
+     * serialize the instruction stream.
+     */
+    if (a0 & TCG_MO_ST_LD) {
+        /* fast-bcr-serialization facility (45) is present */
+        tcg_out_insn(s, RR, BCR, 14, 0);
+    }
+}
 
 # define OP_32_64(x) \
         case glue(glue(INDEX_op_,x),_i32): \
@@ -3107,15 +3118,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
         tcg_out_st(s, TCG_TYPE_I64, args[0], args[1], args[2]);
         break;
 
-    case INDEX_op_mb:
-        /* The host memory model is quite strong, we simply need to
-           serialize the instruction stream.  */
-        if (args[0] & TCG_MO_ST_LD) {
-            /* fast-bcr-serialization facility (45) is present */
-            tcg_out_insn(s, RR, BCR, 14, 0);
-        }
-        break;
-
     case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
     case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
     case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
@@ -949,7 +949,7 @@ static void tcg_out_call(TCGContext *s, const tcg_insn_unit *dest,
     tcg_out_nop(s);
 }
 
-static void tcg_out_mb(TCGContext *s, TCGArg a0)
+static void tcg_out_mb(TCGContext *s, unsigned a0)
 {
     /* Note that the TCG memory order constants mirror the Sparc MEMBAR.  */
     tcg_out32(s, MEMBAR | (a0 & TCG_MO_ALL));
@@ -2025,10 +2025,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
         tcg_out_ldst(s, a0, a1, a2, STX);
         break;
 
-    case INDEX_op_mb:
-        tcg_out_mb(s, a0);
-        break;
-
     case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
     case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
     case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */
@@ -133,6 +133,7 @@ static void tcg_out_addi_ptr(TCGContext *s, TCGReg, TCGReg, tcg_target_long);
 static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2);
 static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg);
 static void tcg_out_goto_tb(TCGContext *s, int which);
+static void tcg_out_mb(TCGContext *s, unsigned bar);
 static void tcg_out_set_carry(TCGContext *s);
 static void tcg_out_set_borrow(TCGContext *s);
 static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
@@ -6899,6 +6900,9 @@ int tcg_gen_code(TCGContext *s, TranslationBlock *tb, uint64_t pc_start)
         case INDEX_op_goto_tb:
             tcg_out_goto_tb(s, op->args[0]);
             break;
+        case INDEX_op_mb:
+            tcg_out_mb(s, op->args[0]);
+            break;
         case INDEX_op_dup2_vec:
             if (tcg_reg_alloc_dup2(s, op)) {
                 break;
@@ -1131,6 +1131,11 @@ static const TCGOutOpSetcond2 outop_setcond2 = {
     .out = tgen_setcond2,
 };
 
+static void tcg_out_mb(TCGContext *s, unsigned a0)
+{
+    tcg_out_op_v(s, INDEX_op_mb);
+}
+
 static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
                        const TCGArg args[TCG_MAX_OP_ARGS],
                        const int const_args[TCG_MAX_OP_ARGS])
@@ -1178,10 +1183,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
         }
         break;
 
-    case INDEX_op_mb:
-        tcg_out_op_v(s, opc);
-        break;
-
     case INDEX_op_call:     /* Always emitted via tcg_out_call.  */
     case INDEX_op_exit_tb:  /* Always emitted via tcg_out_exit_tb.  */
     case INDEX_op_goto_tb:  /* Always emitted via tcg_out_goto_tb.  */