tcg/riscv: Rationalize args to tcg_out_qemu_{ld,st}

Interpret the variable argument placement in the caller.  Pass data_type
instead of is64 -- there are several places where we already convert back
from bool to type.  Clean things up by using type throughout.

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Reviewed-by: Daniel Henrique Barboza <dbarboza@ventanamicro.com>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Richard Henderson, 2023-04-06 13:27:16 -07:00
parent aeb6326ec5
commit f7041977a6
1 changed file with 24 additions and 42 deletions

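Before the diff itself, a minimal self-contained sketch of the pattern the message describes may help. It is not QEMU code; the names in it (Arg, Type, emit_ld_before, emit_ld_after) are invented for illustration. The point is that the caller interprets the argument placement and passes an explicit type, instead of handing the callee a raw argument array plus a bool that must be converted back into a type.

/*
 * Illustrative sketch only -- not QEMU code.  Arg, Type and the two
 * emit_ld_* functions are invented to show the refactoring pattern
 * described in the commit message above.
 */
#include <inttypes.h>
#include <stdbool.h>
#include <stdio.h>

typedef uint64_t Arg;
typedef enum { TYPE_I32, TYPE_I64 } Type;

/* Before: the helper unpacks the variable argument array itself and
 * receives a bool that later has to be converted back into a type. */
static void emit_ld_before(const Arg *args, bool is_64)
{
    Arg data = *args++;
    Arg addr = *args++;
    Arg oi   = *args++;
    Type type = is_64 ? TYPE_I64 : TYPE_I32;   /* bool -> type, per helper */

    printf("ld: data=%" PRIu64 " addr=%" PRIu64 " oi=%" PRIu64 " type=%d\n",
           data, addr, oi, (int)type);
}

/* After: the caller passes the operands and the data type directly. */
static void emit_ld_after(Arg data, Arg addr, Arg oi, Type type)
{
    printf("ld: data=%" PRIu64 " addr=%" PRIu64 " oi=%" PRIu64 " type=%d\n",
           data, addr, oi, (int)type);
}

int main(void)
{
    Arg args[3] = { 10, 20, 30 };   /* data reg, addr reg, memop index */

    emit_ld_before(args, true);
    /* The dispatcher (cf. tcg_out_op in the diff) now does the unpacking. */
    emit_ld_after(args[0], args[1], args[2], TYPE_I64);
    return 0;
}

As in the patch, the bool-to-type conversion then happens once in the dispatcher rather than inside every helper.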

@@ -1087,7 +1087,7 @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
 #endif /* CONFIG_SOFTMMU */

 static void tcg_out_qemu_ld_direct(TCGContext *s, TCGReg val,
-                                   TCGReg base, MemOp opc, bool is_64)
+                                   TCGReg base, MemOp opc, TCGType type)
 {
     /* Byte swapping is left to middle-end expansion. */
     tcg_debug_assert((opc & MO_BSWAP) == 0);
@@ -1106,7 +1106,7 @@ static void tcg_out_qemu_ld_direct(TCGContext *s, TCGReg val,
         tcg_out_opc_imm(s, OPC_LH, val, base, 0);
         break;
     case MO_UL:
-        if (is_64) {
+        if (type == TCG_TYPE_I64) {
             tcg_out_opc_imm(s, OPC_LWU, val, base, 0);
             break;
         }
@@ -1122,30 +1122,21 @@ static void tcg_out_qemu_ld_direct(TCGContext *s, TCGReg val,
     }
 }

-static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, bool is_64)
+static void tcg_out_qemu_ld(TCGContext *s, TCGReg data_reg, TCGReg addr_reg,
+                            MemOpIdx oi, TCGType data_type)
 {
-    TCGReg addr_reg, data_reg;
-    MemOpIdx oi;
-    MemOp opc;
-#if defined(CONFIG_SOFTMMU)
-    tcg_insn_unit *label_ptr[1];
-#else
-    unsigned a_bits;
-#endif
+    MemOp opc = get_memop(oi);
     TCGReg base;

-    data_reg = *args++;
-    addr_reg = *args++;
-    oi = *args++;
-    opc = get_memop(oi);
-
 #if defined(CONFIG_SOFTMMU)
+    tcg_insn_unit *label_ptr[1];
+
     base = tcg_out_tlb_load(s, addr_reg, oi, label_ptr, 1);
-    tcg_out_qemu_ld_direct(s, data_reg, base, opc, is_64);
-    add_qemu_ldst_label(s, 1, oi, (is_64 ? TCG_TYPE_I64 : TCG_TYPE_I32),
-                        data_reg, addr_reg, s->code_ptr, label_ptr);
+    tcg_out_qemu_ld_direct(s, data_reg, base, opc, data_type);
+    add_qemu_ldst_label(s, true, oi, data_type, data_reg, addr_reg,
+                        s->code_ptr, label_ptr);
 #else
-    a_bits = get_alignment_bits(opc);
+    unsigned a_bits = get_alignment_bits(opc);
     if (a_bits) {
         tcg_out_test_alignment(s, true, addr_reg, a_bits);
     }
@@ -1158,7 +1149,7 @@ static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, bool is_64)
         tcg_out_opc_reg(s, OPC_ADD, TCG_REG_TMP0, TCG_GUEST_BASE_REG, base);
         base = TCG_REG_TMP0;
     }
-    tcg_out_qemu_ld_direct(s, data_reg, base, opc, is_64);
+    tcg_out_qemu_ld_direct(s, data_reg, base, opc, data_type);
 #endif
 }

@@ -1186,30 +1177,21 @@ static void tcg_out_qemu_st_direct(TCGContext *s, TCGReg val,
     }
 }

-static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, bool is_64)
+static void tcg_out_qemu_st(TCGContext *s, TCGReg data_reg, TCGReg addr_reg,
+                            MemOpIdx oi, TCGType data_type)
 {
-    TCGReg addr_reg, data_reg;
-    MemOpIdx oi;
-    MemOp opc;
-#if defined(CONFIG_SOFTMMU)
-    tcg_insn_unit *label_ptr[1];
-#else
-    unsigned a_bits;
-#endif
+    MemOp opc = get_memop(oi);
     TCGReg base;

-    data_reg = *args++;
-    addr_reg = *args++;
-    oi = *args++;
-    opc = get_memop(oi);
-
 #if defined(CONFIG_SOFTMMU)
+    tcg_insn_unit *label_ptr[1];
+
     base = tcg_out_tlb_load(s, addr_reg, oi, label_ptr, 0);
     tcg_out_qemu_st_direct(s, data_reg, base, opc);
-    add_qemu_ldst_label(s, 0, oi, (is_64 ? TCG_TYPE_I64 : TCG_TYPE_I32),
-                        data_reg, addr_reg, s->code_ptr, label_ptr);
+    add_qemu_ldst_label(s, false, oi, data_type, data_reg, addr_reg,
+                        s->code_ptr, label_ptr);
 #else
-    a_bits = get_alignment_bits(opc);
+    unsigned a_bits = get_alignment_bits(opc);
     if (a_bits) {
         tcg_out_test_alignment(s, false, addr_reg, a_bits);
     }
@@ -1508,16 +1490,16 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         break;

     case INDEX_op_qemu_ld_i32:
-        tcg_out_qemu_ld(s, args, false);
+        tcg_out_qemu_ld(s, a0, a1, a2, TCG_TYPE_I32);
         break;
     case INDEX_op_qemu_ld_i64:
-        tcg_out_qemu_ld(s, args, true);
+        tcg_out_qemu_ld(s, a0, a1, a2, TCG_TYPE_I64);
         break;
     case INDEX_op_qemu_st_i32:
-        tcg_out_qemu_st(s, args, false);
+        tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I32);
         break;
     case INDEX_op_qemu_st_i64:
-        tcg_out_qemu_st(s, args, true);
+        tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I64);
         break;

     case INDEX_op_extrh_i64_i32: