tcg: Split out tcg_out_ext8u

We will need a backend interface for performing 8-bit zero-extend.
Use it in tcg_reg_alloc_op in the meantime.

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
commit d0e66c897f (parent 678155b2c5)
Author: Richard Henderson
Date:   2023-04-05 13:26:51 -07:00
11 changed files with 69 additions and 33 deletions
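
Only the s390x backend hunks are reproduced below; the other ten changed files follow the same pattern. As orientation, here is a minimal sketch of the shape of the split, assuming the generic dispatch mirrors the existing tcg_out_ext8s handling in tcg_reg_alloc_op (the new_args array name is inferred from that pattern and does not appear in this excerpt):

    /* Per-backend hook: zero-extend the low 8 bits of src into dest.
       The TCGType argument of tgen_ext8u is gone: an 8-bit zero-extend
       produces the same bits for 32-bit and 64-bit operations. */
    static void tcg_out_ext8u(TCGContext *s, TCGReg dest, TCGReg src);

    /* Sketch of the dispatch in tcg_reg_alloc_op (tcg/tcg.c), modeled
       on the existing ext8s case; not part of the hunks shown here. */
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
        tcg_out_ext8u(s, new_args[0], new_args[1]);
        break;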

@@ -1097,7 +1097,7 @@ static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
     tcg_out_insn(s, RRE, LGBR, dest, src);
 }
 
-static void tgen_ext8u(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
+static void tcg_out_ext8u(TCGContext *s, TCGReg dest, TCGReg src)
 {
     tcg_out_insn(s, RRE, LLGCR, dest, src);
 }
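
The s390x implementation is a single RRE-format instruction: LLGCR (Load Logical Character, register-to-register) zero-extends the low byte of src into all 64 bits of dest. A stand-alone C model of that semantics, for readers unfamiliar with the ISA (the helper name is mine):

    #include <stdint.h>

    /* C model of s390x LLGCR: keep the low 8 bits of src,
       clear the upper 56 bits. */
    static inline uint64_t llgcr_model(uint64_t src)
    {
        return (uint64_t)(uint8_t)src;   /* i.e. src & 0xff */
    }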
@@ -1153,7 +1153,7 @@ static void tgen_andi(TCGContext *s, TCGType type, TCGReg dest, uint64_t val)
         return;
     }
     if ((val & valid) == 0xff) {
-        tgen_ext8u(s, TCG_TYPE_I64, dest, dest);
+        tcg_out_ext8u(s, dest, dest);
         return;
     }
     if ((val & valid) == 0xffff) {
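
Note the signature simplification visible here: because an 8-bit zero-extend yields the same result whether the operation is 32- or 64-bit, the old TCG_TYPE_I64 argument carried no information, and callers such as tgen_andi simply drop it.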
@@ -1806,7 +1806,7 @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)
     }
     switch (opc & MO_SIZE) {
     case MO_UB:
-        tgen_ext8u(s, TCG_TYPE_I64, TCG_REG_R4, data_reg);
+        tcg_out_ext8u(s, TCG_REG_R4, data_reg);
         break;
     case MO_UW:
         tgen_ext16u(s, TCG_TYPE_I64, TCG_REG_R4, data_reg);
@@ -2236,9 +2236,6 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_ext16s_i32:
         tgen_ext16s(s, TCG_TYPE_I32, args[0], args[1]);
         break;
-    case INDEX_op_ext8u_i32:
-        tgen_ext8u(s, TCG_TYPE_I32, args[0], args[1]);
-        break;
     case INDEX_op_ext16u_i32:
         tgen_ext16u(s, TCG_TYPE_I32, args[0], args[1]);
         break;
@@ -2541,9 +2538,6 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_ext32s_i64:
         tgen_ext32s(s, args[0], args[1]);
         break;
-    case INDEX_op_ext8u_i64:
-        tgen_ext8u(s, TCG_TYPE_I64, args[0], args[1]);
-        break;
     case INDEX_op_ext16u_i64:
         tgen_ext16u(s, TCG_TYPE_I64, args[0], args[1]);
         break;
@@ -2640,6 +2634,8 @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_ext8s_i64:
+    case INDEX_op_ext8u_i32:
+    case INDEX_op_ext8u_i64:
     default:
         g_assert_not_reached();
     }
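
The final hunk completes the move: ext8u_i32 and ext8u_i64 are now always emitted via tcg_reg_alloc_op before tcg_out_op is reached, so they join the ext8s cases in the list of opcodes whose appearance here would indicate a dispatch bug, guarded by g_assert_not_reached().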