tcg: Convert ext_i32_i64 to TCGOutOpUnary

commit b7b7347fe3 (parent fa361eefac)
Author: Richard Henderson <richard.henderson@linaro.org>
Date:   2025-01-12 12:22:45 -08:00

Reviewed-by: Pierrick Bouvier <pierrick.bouvier@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>

10 changed files with 19 additions and 21 deletions
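
For context: TCGOutOpUnary is the per-opcode emission hook that this series converts unary operations to. A minimal sketch of its shape, inferred from the hunks below (field names match their use in tcg/tcg.c; this is not the verbatim upstream definition):

    /* Sketch: a unary-op emitter plus its register constraint,
     * selected per opcode through the all_outop[] table. */
    typedef struct TCGOutOpUnary {
        TCGOutOp base;  /* holds static_constraint, e.g. C_O1_I1(r, r) */
        void (*out_rr)(TCGContext *s, TCGType type, TCGReg a0, TCGReg a1);
    } TCGOutOpUnary;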

==== file 1 of 10 ====

@@ -2710,7 +2710,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType ext,
     case INDEX_op_call:        /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb:     /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb:     /* Always emitted via tcg_out_goto_tb. */
-    case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_extu_i32_i64:
     case INDEX_op_extrl_i64_i32:
     default:
@@ -3177,7 +3176,6 @@ tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
     case INDEX_op_ld32u_i64:
     case INDEX_op_ld32s_i64:
     case INDEX_op_ld_i64:
-    case INDEX_op_ext_i32_i64:
     case INDEX_op_extu_i32_i64:
         return C_O1_I1(r, r);

==== file 2 of 10 ====

@@ -3413,7 +3413,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
     case INDEX_op_call:        /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb:     /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb:     /* Always emitted via tcg_out_goto_tb. */
-    case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_extu_i32_i64:
     case INDEX_op_extrl_i64_i32:
     default:
@@ -4001,7 +4000,6 @@ tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
     case INDEX_op_extrh_i64_i32:
         return C_O1_I1(r, 0);

-    case INDEX_op_ext_i32_i64:
     case INDEX_op_extu_i32_i64:
     case INDEX_op_extrl_i64_i32:
         return C_O1_I1(r, r);

==== file 3 of 10 ====

@@ -1943,7 +1943,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
     case INDEX_op_call:        /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb:     /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb:     /* Always emitted via tcg_out_goto_tb. */
-    case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_extu_i32_i64:
     case INDEX_op_extrl_i64_i32:
     default:
@@ -2468,7 +2467,6 @@ tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
     case INDEX_op_extu_i32_i64:
     case INDEX_op_extrl_i64_i32:
     case INDEX_op_extrh_i64_i32:
-    case INDEX_op_ext_i32_i64:
     case INDEX_op_ld8s_i32:
     case INDEX_op_ld8s_i64:
     case INDEX_op_ld8u_i32:

==== file 4 of 10 ====

@@ -2364,7 +2364,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
     case INDEX_op_call:        /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb:     /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb:     /* Always emitted via tcg_out_goto_tb. */
-    case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_extu_i32_i64:
     case INDEX_op_extrl_i64_i32:
     default:
@@ -2391,7 +2390,6 @@ tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
     case INDEX_op_ld32s_i64:
     case INDEX_op_ld32u_i64:
     case INDEX_op_ld_i64:
-    case INDEX_op_ext_i32_i64:
     case INDEX_op_extu_i32_i64:
     case INDEX_op_extrl_i64_i32:
     case INDEX_op_extrh_i64_i32:

==== file 5 of 10 ====

@@ -3640,7 +3640,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
     case INDEX_op_call:        /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb:     /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb:     /* Always emitted via tcg_out_goto_tb. */
-    case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_extu_i32_i64:
     case INDEX_op_extrl_i64_i32:
     default:
@@ -4270,7 +4269,6 @@ tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
     case INDEX_op_ld32u_i64:
     case INDEX_op_ld32s_i64:
     case INDEX_op_ld_i64:
-    case INDEX_op_ext_i32_i64:
     case INDEX_op_extu_i32_i64:
         return C_O1_I1(r, r);

==== file 6 of 10 ====

@@ -2630,7 +2630,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
     case INDEX_op_call:        /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb:     /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb:     /* Always emitted via tcg_out_goto_tb. */
-    case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_extu_i32_i64:
     case INDEX_op_extrl_i64_i32:
     default:
@@ -2877,7 +2876,6 @@ tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
     case INDEX_op_extu_i32_i64:
     case INDEX_op_extrl_i64_i32:
     case INDEX_op_extrh_i64_i32:
-    case INDEX_op_ext_i32_i64:
         return C_O1_I1(r, r);

     case INDEX_op_st8_i32:

==== file 7 of 10 ====

@@ -2997,7 +2997,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
     case INDEX_op_call:        /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb:     /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb:     /* Always emitted via tcg_out_goto_tb. */
-    case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_extu_i32_i64:
     case INDEX_op_extrl_i64_i32:
     default:
@@ -3471,7 +3470,6 @@ tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
     case INDEX_op_st_i64:
         return C_O0_I2(r, r);

-    case INDEX_op_ext_i32_i64:
     case INDEX_op_extu_i32_i64:
         return C_O1_I1(r, r);

==== file 8 of 10 ====

@@ -1883,7 +1883,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
     case INDEX_op_call:        /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb:     /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb:     /* Always emitted via tcg_out_goto_tb. */
-    case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_extu_i32_i64:
     default:
         g_assert_not_reached();
@@ -1909,7 +1908,6 @@ tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
     case INDEX_op_ld32u_i64:
     case INDEX_op_ld32s_i64:
     case INDEX_op_ld_i64:
-    case INDEX_op_ext_i32_i64:
     case INDEX_op_extu_i32_i64:
     case INDEX_op_qemu_ld_i32:
     case INDEX_op_qemu_ld_i64:

==== file 9 of 10 ====

@@ -1068,6 +1068,23 @@ QEMU_BUILD_BUG_ON((int)(offsetof(CPUNegativeOffsetState, tlb.f[0]) -
                   < MIN_TLB_MASK_TABLE_OFS);
 #endif

+#if TCG_TARGET_REG_BITS == 64
+/*
+ * We require these functions for slow-path function calls.
+ * Adapt them generically for opcode output.
+ */
+
+static void tgen_exts_i32_i64(TCGContext *s, TCGType t, TCGReg a0, TCGReg a1)
+{
+    tcg_out_exts_i32_i64(s, a0, a1);
+}
+
+static const TCGOutOpUnary outop_exts_i32_i64 = {
+    .base.static_constraint = C_O1_I1(r, r),
+    .out_rr = tgen_exts_i32_i64,
+};
+#endif
+
 /*
  * Register V as the TCGOutOp for O.
  * This verifies that V is of type T, otherwise give a nice compiler error.
@@ -1122,6 +1139,7 @@ static const TCGOutOp * const all_outop[NB_OPS] = {
     OUTOP(INDEX_op_setcond2_i32, TCGOutOpSetcond2, outop_setcond2),
 #else
     OUTOP(INDEX_op_bswap64, TCGOutOpUnary, outop_bswap64),
+    OUTOP(INDEX_op_ext_i32_i64, TCGOutOpUnary, outop_exts_i32_i64),
 #endif
 };
@@ -5412,9 +5430,6 @@ static void tcg_reg_alloc_op(TCGContext *s, const TCGOp *op)
     /* emit instruction */
     TCGType type = TCGOP_TYPE(op);
     switch (op->opc) {
-    case INDEX_op_ext_i32_i64:
-        tcg_out_exts_i32_i64(s, new_args[0], new_args[1]);
-        break;
     case INDEX_op_extu_i32_i64:
         tcg_out_extu_i32_i64(s, new_args[0], new_args[1]);
         break;
@@ -5477,6 +5492,7 @@ static void tcg_reg_alloc_op(TCGContext *s, const TCGOp *op)
         break;
     case INDEX_op_bswap64:
+    case INDEX_op_ext_i32_i64:
         assert(TCG_TARGET_REG_BITS == 64);
         /* fall through */
     case INDEX_op_ctpop:

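With the changes above, INDEX_op_ext_i32_i64 no longer has a dedicated switch case calling tcg_out_exts_i32_i64() directly; it falls through to the generic unary dispatch shared with bswap64, ctpop, etc. A simplified sketch of that shared path, assuming the container_of() pattern this series uses elsewhere (not the verbatim upstream code):

    /* Generic dispatch in tcg_reg_alloc_op: look up the opcode's
     * TCGOutOp in all_outop[] and invoke its out_rr emitter. */
    case INDEX_op_ctpop:
        {
            const TCGOutOpUnary *out =
                container_of(all_outop[op->opc], TCGOutOpUnary, base);
            out->out_rr(s, type, new_args[0], new_args[1]);
        }
        break;
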
==== file 10 of 10 ====

@@ -55,7 +55,6 @@ tcg_target_op_def(TCGOpcode op, TCGType type, unsigned flags)
     case INDEX_op_ld32u_i64:
     case INDEX_op_ld32s_i64:
     case INDEX_op_ld_i64:
-    case INDEX_op_ext_i32_i64:
     case INDEX_op_extu_i32_i64:
         return C_O1_I1(r, r);

@@ -1109,7 +1108,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
     case INDEX_op_call:        /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb:     /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb:     /* Always emitted via tcg_out_goto_tb. */
-    case INDEX_op_ext_i32_i64: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_extu_i32_i64:
     case INDEX_op_extrl_i64_i32:
     default:
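
Note: the generic adapter in tcg.c works because every 64-bit backend already provides tcg_out_exts_i32_i64() for setting up sign-extended arguments on slow-path calls, as the added comment says. For illustration only, a hypothetical RISC-like backend might implement it as follows (a sketch, not any specific backend's code; the opcode name is assumed):

    /* Sign-extend the low 32 bits of rs into rd, hypothetically via a
     * 32-bit add-immediate of zero (e.g. RISC-V's addiw rd, rs, 0). */
    static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
    {
        tcg_out_opc_imm(s, OPC_ADDIW, rd, rs, 0);
    }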