tcg: Merge INDEX_op_st*_{i32,i64}
Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Reviewed-by: Pierrick Bouvier <pierrick.bouvier@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
parent 4a686aa9d9
commit a28f151d61
6 changed files with 50 additions and 108 deletions
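Every hunk below follows the same pattern: the width-specific store opcodes (INDEX_op_st8_i32/_i64, st16_i32/_i64, st32_i64, st_i32/_i64) are merged into width-generic INDEX_op_st8, st16, st32 and st, all flagged TCG_OPF_INT, and consumers take the operand width from the op's TCGType instead of from the opcode name. As a rough standalone illustration of the resulting dispatch (a sketch only, not QEMU code; the enum and helper names are invented), one opcode per access size suffices once the full-register store consults the recorded type:

#include <stdint.h>
#include <stddef.h>
#include <string.h>

/* Invented stand-ins for the merged store opcodes and the op's type. */
typedef enum { OP_ST8, OP_ST16, OP_ST32, OP_ST } StoreOp;
typedef enum { TYPE_I32, TYPE_I64 } ValType;

/* One width-generic store per size; OP_ST takes its width from the type. */
static void emit_store(StoreOp op, ValType type,
                       uint64_t val, void *base, ptrdiff_t ofs)
{
    uint8_t *ptr = (uint8_t *)base + ofs;

    switch (op) {
    case OP_ST8:  { uint8_t  v = (uint8_t)val;  memcpy(ptr, &v, sizeof(v)); } break;
    case OP_ST16: { uint16_t v = (uint16_t)val; memcpy(ptr, &v, sizeof(v)); } break;
    case OP_ST32: { uint32_t v = (uint32_t)val; memcpy(ptr, &v, sizeof(v)); } break;
    case OP_ST:
        if (type == TYPE_I32) {
            uint32_t v = (uint32_t)val; memcpy(ptr, &v, sizeof(v));
        } else {
            uint64_t v = val;           memcpy(ptr, &v, sizeof(v));
        }
        break;
    }
}

Before the merge each of these sizes existed twice (an _i32 and an _i64 flavor), which is why the CASE_OP_32_64 and CASE_32_64 helper macros removed below were needed at every switch over opcodes.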
@@ -86,6 +86,10 @@ DEF(setcond, 1, 2, 1, TCG_OPF_INT)
 DEF(sextract, 1, 1, 2, TCG_OPF_INT)
 DEF(shl, 1, 2, 0, TCG_OPF_INT)
 DEF(shr, 1, 2, 0, TCG_OPF_INT)
+DEF(st8, 0, 2, 1, TCG_OPF_INT)
+DEF(st16, 0, 2, 1, TCG_OPF_INT)
+DEF(st32, 0, 2, 1, TCG_OPF_INT)
+DEF(st, 0, 2, 1, TCG_OPF_INT)
 DEF(sub, 1, 2, 0, TCG_OPF_INT)
 DEF(xor, 1, 2, 0, TCG_OPF_INT)
 
@@ -99,20 +103,9 @@ DEF(subb1o, 1, 2, 0, TCG_OPF_INT | TCG_OPF_CARRY_OUT)
 DEF(subbi, 1, 2, 0, TCG_OPF_INT | TCG_OPF_CARRY_IN)
 DEF(subbio, 1, 2, 0, TCG_OPF_INT | TCG_OPF_CARRY_IN | TCG_OPF_CARRY_OUT)
 
-/* load/store */
-DEF(st8_i32, 0, 2, 1, 0)
-DEF(st16_i32, 0, 2, 1, 0)
-DEF(st_i32, 0, 2, 1, 0)
-
 DEF(brcond2_i32, 0, 4, 2, TCG_OPF_BB_END | TCG_OPF_COND_BRANCH)
 DEF(setcond2_i32, 1, 4, 1, 0)
 
-/* load/store */
-DEF(st8_i64, 0, 2, 1, 0)
-DEF(st16_i64, 0, 2, 1, 0)
-DEF(st32_i64, 0, 2, 1, 0)
-DEF(st_i64, 0, 2, 1, 0)
-
 /* size changing ops */
 DEF(ext_i32_i64, 1, 1, 0, 0)
 DEF(extu_i32_i64, 1, 1, 0, 0)
@@ -30,14 +30,6 @@
 #include "tcg-internal.h"
 #include "tcg-has.h"
 
-#define CASE_OP_32_64(x)                        \
-        glue(glue(case INDEX_op_, x), _i32):    \
-        glue(glue(case INDEX_op_, x), _i64)
-
-#define CASE_OP_32_64_VEC(x)                    \
-        glue(glue(case INDEX_op_, x), _i32):    \
-        glue(glue(case INDEX_op_, x), _i64):    \
-        glue(glue(case INDEX_op_, x), _vec)
 
 typedef struct MemCopyInfo {
     IntervalTreeNode itree;

@@ -2938,19 +2930,16 @@ static bool fold_tcg_st(OptContext *ctx, TCGOp *op)
     }
 
     switch (op->opc) {
-    CASE_OP_32_64(st8):
+    case INDEX_op_st8:
         lm1 = 0;
         break;
-    CASE_OP_32_64(st16):
+    case INDEX_op_st16:
         lm1 = 1;
         break;
-    case INDEX_op_st32_i64:
-    case INDEX_op_st_i32:
+    case INDEX_op_st32:
         lm1 = 3;
         break;
-    case INDEX_op_st_i64:
-        lm1 = 7;
-        break;
+    case INDEX_op_st:
     case INDEX_op_st_vec:
         lm1 = tcg_type_size(ctx->type) - 1;
         break;

@@ -3138,13 +3127,12 @@ void tcg_optimize(TCGContext *s)
         case INDEX_op_ld_vec:
             done = fold_tcg_ld_memcopy(&ctx, op);
             break;
-        CASE_OP_32_64(st8):
-        CASE_OP_32_64(st16):
-        case INDEX_op_st32_i64:
+        case INDEX_op_st8:
+        case INDEX_op_st16:
+        case INDEX_op_st32:
             done = fold_tcg_st(&ctx, op);
             break;
-        case INDEX_op_st_i32:
-        case INDEX_op_st_i64:
+        case INDEX_op_st:
         case INDEX_op_st_vec:
             done = fold_tcg_st_memcopy(&ctx, op);
             break;
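For reference, the lm1 value that fold_tcg_st() computes is the store length minus one: after the merge it is a fixed 0, 1 or 3 for st8, st16 and st32, while the width-generic st (like st_vec) derives it from the op's recorded type via tcg_type_size(). A minimal sketch of that mapping, with an invented enum and the type size passed in directly rather than queried from a TCG type:

/* Sketch only: invented enum mirroring the merged store opcodes. */
typedef enum { ST8, ST16, ST32, ST, ST_VEC } FoldStOp;

/* Length-minus-one of a store, as fold_tcg_st() computes it after the
 * merge: fixed for the sized opcodes, taken from the op's type for the
 * width-generic integer store and for the vector store. */
static int store_lm1(FoldStOp op, int type_size_bytes)
{
    switch (op) {
    case ST8:  return 0;
    case ST16: return 1;
    case ST32: return 3;
    case ST:
    case ST_VEC:
        return type_size_bytes - 1;
    }
    return -1; /* not reached for valid opcodes */
}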
tcg/tcg-op.c | 14
@@ -1404,17 +1404,17 @@ void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
 
 void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
 {
-    tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
+    tcg_gen_ldst_op_i32(INDEX_op_st8, arg1, arg2, offset);
 }
 
 void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
 {
-    tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
+    tcg_gen_ldst_op_i32(INDEX_op_st16, arg1, arg2, offset);
 }
 
 void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
 {
-    tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
+    tcg_gen_ldst_op_i32(INDEX_op_st, arg1, arg2, offset);
 }
 
 

@@ -1540,7 +1540,7 @@ void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
 void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
 {
     if (TCG_TARGET_REG_BITS == 64) {
-        tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
+        tcg_gen_ldst_op_i64(INDEX_op_st8, arg1, arg2, offset);
     } else {
         tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
     }

@@ -1549,7 +1549,7 @@ void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
 void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
 {
     if (TCG_TARGET_REG_BITS == 64) {
-        tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
+        tcg_gen_ldst_op_i64(INDEX_op_st16, arg1, arg2, offset);
     } else {
         tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
     }

@@ -1558,7 +1558,7 @@ void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
 void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
 {
     if (TCG_TARGET_REG_BITS == 64) {
-        tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
+        tcg_gen_ldst_op_i64(INDEX_op_st32, arg1, arg2, offset);
     } else {
         tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
     }

@@ -1567,7 +1567,7 @@ void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
 void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
 {
     if (TCG_TARGET_REG_BITS == 64) {
-        tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
+        tcg_gen_ldst_op_i64(INDEX_op_st, arg1, arg2, offset);
     } else if (HOST_BIG_ENDIAN) {
         tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
         tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
tcg/tcg.c | 45
@@ -1219,12 +1219,9 @@ static const TCGOutOp * const all_outop[NB_OPS] = {
     OUTOP(INDEX_op_sextract, TCGOutOpExtract, outop_sextract),
     OUTOP(INDEX_op_shl, TCGOutOpBinary, outop_shl),
     OUTOP(INDEX_op_shr, TCGOutOpBinary, outop_shr),
-    OUTOP(INDEX_op_st_i32, TCGOutOpStore, outop_st),
-    OUTOP(INDEX_op_st_i64, TCGOutOpStore, outop_st),
-    OUTOP(INDEX_op_st8_i32, TCGOutOpStore, outop_st8),
-    OUTOP(INDEX_op_st8_i64, TCGOutOpStore, outop_st8),
-    OUTOP(INDEX_op_st16_i32, TCGOutOpStore, outop_st16),
-    OUTOP(INDEX_op_st16_i64, TCGOutOpStore, outop_st16),
+    OUTOP(INDEX_op_st, TCGOutOpStore, outop_st),
+    OUTOP(INDEX_op_st8, TCGOutOpStore, outop_st8),
+    OUTOP(INDEX_op_st16, TCGOutOpStore, outop_st16),
     OUTOP(INDEX_op_sub, TCGOutOpSubtract, outop_sub),
     OUTOP(INDEX_op_subbi, TCGOutOpAddSubCarry, outop_subbi),
     OUTOP(INDEX_op_subbio, TCGOutOpAddSubCarry, outop_subbio),

@@ -1246,7 +1243,7 @@ static const TCGOutOp * const all_outop[NB_OPS] = {
     OUTOP(INDEX_op_extrh_i64_i32, TCGOutOpUnary, outop_extrh_i64_i32),
     OUTOP(INDEX_op_ld32u, TCGOutOpLoad, outop_ld32u),
     OUTOP(INDEX_op_ld32s, TCGOutOpLoad, outop_ld32s),
-    OUTOP(INDEX_op_st32_i64, TCGOutOpStore, outop_st),
+    OUTOP(INDEX_op_st32, TCGOutOpStore, outop_st),
 #endif
 };
 

@@ -2464,24 +2461,19 @@ bool tcg_op_supported(TCGOpcode op, TCGType type, unsigned flags)
     case INDEX_op_or:
     case INDEX_op_setcond:
     case INDEX_op_sextract:
+    case INDEX_op_st8:
+    case INDEX_op_st16:
+    case INDEX_op_st:
     case INDEX_op_xor:
         return has_type;
 
-    case INDEX_op_st8_i32:
-    case INDEX_op_st16_i32:
-    case INDEX_op_st_i32:
-        return true;
-
     case INDEX_op_brcond2_i32:
     case INDEX_op_setcond2_i32:
         return TCG_TARGET_REG_BITS == 32;
 
     case INDEX_op_ld32u:
     case INDEX_op_ld32s:
-    case INDEX_op_st8_i64:
-    case INDEX_op_st16_i64:
-    case INDEX_op_st32_i64:
-    case INDEX_op_st_i64:
+    case INDEX_op_st32:
     case INDEX_op_ext_i32_i64:
     case INDEX_op_extu_i32_i64:
     case INDEX_op_extrl_i64_i32:

@@ -4494,10 +4486,7 @@ liveness_pass_2(TCGContext *s)
                 arg_ts->state = 0;
 
                 if (NEED_SYNC_ARG(0)) {
-                    TCGOpcode sopc = (arg_ts->type == TCG_TYPE_I32
-                                      ? INDEX_op_st_i32
-                                      : INDEX_op_st_i64);
-                    TCGOp *sop = tcg_op_insert_after(s, op, sopc,
+                    TCGOp *sop = tcg_op_insert_after(s, op, INDEX_op_st,
                                                      arg_ts->type, 3);
                     TCGTemp *out_ts = dir_ts;
 

@@ -4531,10 +4520,7 @@ liveness_pass_2(TCGContext *s)
 
                 /* Sync outputs upon their last write. */
                 if (NEED_SYNC_ARG(i)) {
-                    TCGOpcode sopc = (arg_ts->type == TCG_TYPE_I32
-                                      ? INDEX_op_st_i32
-                                      : INDEX_op_st_i64);
-                    TCGOp *sop = tcg_op_insert_after(s, op, sopc,
+                    TCGOp *sop = tcg_op_insert_after(s, op, INDEX_op_st,
                                                      arg_ts->type, 3);
 
                     sop->args[0] = temp_arg(dir_ts);

@@ -5794,16 +5780,13 @@ static void tcg_reg_alloc_op(TCGContext *s, const TCGOp *op)
         }
         break;
 
-    case INDEX_op_st32_i64:
+    case INDEX_op_st32:
         /* Use tcg_op_st w/ I32. */
         type = TCG_TYPE_I32;
         /* fall through */
-    case INDEX_op_st_i32:
-    case INDEX_op_st_i64:
-    case INDEX_op_st8_i32:
-    case INDEX_op_st8_i64:
-    case INDEX_op_st16_i32:
-    case INDEX_op_st16_i64:
+    case INDEX_op_st:
+    case INDEX_op_st8:
+    case INDEX_op_st16:
         {
             const TCGOutOpStore *out =
                 container_of(all_outop[op->opc], TCGOutOpStore, base);
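Two details in the hunks above are easy to miss. In tcg_op_supported(), the _i32 store variants no longer return true unconditionally: the merged st8, st16 and st join the group that returns has_type, while st32 stays in the 64-bit-only block alongside ld32u/ld32s. And liveness_pass_2() no longer chooses between INDEX_op_st_i32 and INDEX_op_st_i64 when inserting a sync store; it always emits INDEX_op_st and lets arg_ts->type carry the width. A small sketch of that second simplification, with invented names standing in for the TCG structures:

/* Invented stand-ins for the sketch; not QEMU's types. */
typedef enum { SK_TYPE_I32, SK_TYPE_I64 } SkType;
typedef enum { SK_OP_ST } SkOpcode;

typedef struct {
    SkOpcode opc;   /* always the width-generic store */
    SkType   type;  /* operand width now travels with the op */
} SkSyncStore;

/* After the merge there is nothing to select: the opcode is fixed and
 * the width is whatever type the temp being synced has. */
static SkSyncStore make_sync_store(SkType temp_type)
{
    return (SkSyncStore){ .opc = SK_OP_ST, .type = temp_type };
}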
tcg/tci.c | 36
@@ -325,18 +325,6 @@ static void tci_qemu_st(CPUArchState *env, uint64_t taddr, uint64_t val,
     }
 }
 
-#if TCG_TARGET_REG_BITS == 64
-# define CASE_32_64(x) \
-        case glue(glue(INDEX_op_, x), _i64): \
-        case glue(glue(INDEX_op_, x), _i32):
-# define CASE_64(x) \
-        case glue(glue(INDEX_op_, x), _i64):
-#else
-# define CASE_32_64(x) \
-        case glue(glue(INDEX_op_, x), _i32):
-# define CASE_64(x)
-#endif
-
 /* Interpret pseudo code in tb. */
 /*
  * Disable CFI checks.

@@ -491,21 +479,20 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
             ptr = (void *)(regs[r1] + ofs);
             regs[r0] = *(tcg_target_ulong *)ptr;
             break;
-        CASE_32_64(st8)
+        case INDEX_op_st8:
             tci_args_rrs(insn, &r0, &r1, &ofs);
             ptr = (void *)(regs[r1] + ofs);
             *(uint8_t *)ptr = regs[r0];
             break;
-        CASE_32_64(st16)
+        case INDEX_op_st16:
             tci_args_rrs(insn, &r0, &r1, &ofs);
             ptr = (void *)(regs[r1] + ofs);
             *(uint16_t *)ptr = regs[r0];
             break;
-        case INDEX_op_st_i32:
-        CASE_64(st32)
+        case INDEX_op_st:
             tci_args_rrs(insn, &r0, &r1, &ofs);
             ptr = (void *)(regs[r1] + ofs);
-            *(uint32_t *)ptr = regs[r0];
+            *(tcg_target_ulong *)ptr = regs[r0];
             break;
 
         /* Arithmetic operations (mixed 32/64 bit). */

@@ -725,10 +712,10 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
             ptr = (void *)(regs[r1] + ofs);
             regs[r0] = *(int32_t *)ptr;
             break;
-        case INDEX_op_st_i64:
+        case INDEX_op_st32:
             tci_args_rrs(insn, &r0, &r1, &ofs);
             ptr = (void *)(regs[r1] + ofs);
-            *(uint64_t *)ptr = regs[r0];
+            *(uint32_t *)ptr = regs[r0];
             break;
 
         /* Arithmetic operations (64 bit). */

@@ -975,13 +962,10 @@ int print_insn_tci(bfd_vma addr, disassemble_info *info)
     case INDEX_op_ld16s:
     case INDEX_op_ld32u:
     case INDEX_op_ld:
-    case INDEX_op_st8_i32:
-    case INDEX_op_st8_i64:
-    case INDEX_op_st16_i32:
-    case INDEX_op_st16_i64:
-    case INDEX_op_st32_i64:
-    case INDEX_op_st_i32:
-    case INDEX_op_st_i64:
+    case INDEX_op_st8:
+    case INDEX_op_st16:
+    case INDEX_op_st32:
+    case INDEX_op_st:
         tci_args_rrs(insn, &r0, &r1, &s2);
         info->fprintf_func(info->stream, "%-12s %s, %s, %d",
                            op_name, str_r(r0), str_r(r1), s2);
@@ -1173,7 +1173,7 @@ static const TCGOutOpLoad outop_ld32s = {
 static void tgen_st8(TCGContext *s, TCGType type, TCGReg data,
                      TCGReg base, ptrdiff_t offset)
 {
-    tcg_out_ldst(s, INDEX_op_st8_i32, data, base, offset);
+    tcg_out_ldst(s, INDEX_op_st8, data, base, offset);
 }
 
 static const TCGOutOpStore outop_st8 = {

@@ -1184,7 +1184,7 @@ static const TCGOutOpStore outop_st8 = {
 static void tgen_st16(TCGContext *s, TCGType type, TCGReg data,
                       TCGReg base, ptrdiff_t offset)
 {
-    tcg_out_ldst(s, INDEX_op_st16_i32, data, base, offset);
+    tcg_out_ldst(s, INDEX_op_st16, data, base, offset);
 }
 
 static const TCGOutOpStore outop_st16 = {

@@ -1232,18 +1232,12 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
 static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                        intptr_t offset)
 {
-    switch (type) {
-    case TCG_TYPE_I32:
-        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
-        break;
-#if TCG_TARGET_REG_BITS == 64
-    case TCG_TYPE_I64:
-        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
-        break;
-#endif
-    default:
-        g_assert_not_reached();
+    TCGOpcode op = INDEX_op_st;
+
+    if (TCG_TARGET_REG_BITS == 64 && type == TCG_TYPE_I32) {
+        op = INDEX_op_st32;
     }
+    tcg_out_ldst(s, op, val, base, offset);
 }
 
 static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
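The final hunk shows the backend side of the same idea: tcg_out_st() no longer switches on TCGType to choose an opcode, it defaults to INDEX_op_st and only narrows to INDEX_op_st32 when a 64-bit host is storing an I32 value. A standalone sketch of that selection rule (names invented for illustration; not the backend's actual interface):

#include <stdbool.h>

/* Invented mirrors of the two opcodes involved in the selection. */
typedef enum { BK_OP_ST, BK_OP_ST32 } BkStoreOp;

/* Pick the store opcode the way the updated tcg_out_st() does: plain
 * "st" writes a full host register, so a 64-bit host must narrow an
 * I32 value to an explicit 32-bit store. */
static BkStoreOp pick_store_op(int host_reg_bits, bool value_is_i32)
{
    if (host_reg_bits == 64 && value_is_i32) {
        return BK_OP_ST32;
    }
    return BK_OP_ST;
}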