tcg: Split out tcg_out_ext8u
We will need a backend interface for performing 8-bit zero-extend.
Use it in tcg_reg_alloc_op in the meantime.

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
parent 678155b2c5
commit d0e66c897f
11 changed files with 69 additions and 33 deletions
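Only part of the diff is reproduced below; the hunks shown appear to be the AArch64 backend (tcg/aarch64/tcg-target.c.inc). The commit message says the new hook is called from tcg_reg_alloc_op for now, so the other half of the change lives in the generic code. A minimal sketch of that dispatch follows, modelled on the earlier tcg_out_ext8s split in parent 678155b2c5; the new_args[]/const_args[] names and the surrounding switch are assumptions, not copied from this page.

    /*
     * Sketch only -- the generic-code side (tcg/tcg.c), per the commit
     * message's "use it in tcg_reg_alloc_op in the meantime".  The operand
     * array names are assumed from the matching tcg_out_ext8s change.
     */
    switch (op->opc) {
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
        /* 8-bit zero-extend now goes through the dedicated backend hook. */
        tcg_out_ext8u(s, new_args[0], new_args[1]);
        break;
    default:
        /* Everything else still dispatches through the backend's tcg_out_op. */
        tcg_out_op(s, op->opc, new_args, const_args);
        break;
    }

This is also why the last hunk below adds INDEX_op_ext8u_i32/_i64 to the g_assert_not_reached() cases: once the generic code calls the hook directly, the backend's tcg_out_op must never see those opcodes.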
@@ -1432,6 +1432,11 @@ static inline void tcg_out_uxt(TCGContext *s, MemOp s_bits,
     tcg_out_ubfm(s, 0, rd, rn, 0, bits);
 }
 
+static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+    tcg_out_uxt(s, MO_8, rd, rn);
+}
+
 static void tcg_out_addsubi(TCGContext *s, int ext, TCGReg rd,
                             TCGReg rn, int64_t aimm)
 {
@@ -2243,10 +2248,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_ext32s_i64:
         tcg_out_sxt(s, TCG_TYPE_I64, MO_32, a0, a1);
         break;
-    case INDEX_op_ext8u_i64:
-    case INDEX_op_ext8u_i32:
-        tcg_out_uxt(s, MO_8, a0, a1);
-        break;
     case INDEX_op_ext16u_i64:
     case INDEX_op_ext16u_i32:
         tcg_out_uxt(s, MO_16, a0, a1);
@@ -2313,6 +2314,8 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_ext8s_i64:
+    case INDEX_op_ext8u_i32:
+    case INDEX_op_ext8u_i64:
     default:
         g_assert_not_reached();
     }