[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
[PATCH 28/42] tcg/riscv: Expand arguments to tcg_out_qemu_{ld,st}
From: |
Richard Henderson |
Subject: |
[PATCH 28/42] tcg/riscv: Expand arguments to tcg_out_qemu_{ld,st} |
Date: |
Fri, 7 Apr 2023 19:43:00 -0700 |
Now that the host is always 64-bit, the address and
data operands are always one operand each. In addition,
change to using TCGType to describe the data operand.
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
tcg/riscv/tcg-target.c.inc | 47 +++++++++++++++-----------------------
1 file changed, 18 insertions(+), 29 deletions(-)
diff --git a/tcg/riscv/tcg-target.c.inc b/tcg/riscv/tcg-target.c.inc
index 1edc3b1c4d..6059802d9a 100644
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -1101,7 +1101,7 @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s,
TCGLabelQemuLdst *l)
#endif /* CONFIG_SOFTMMU */
static void tcg_out_qemu_ld_direct(TCGContext *s, TCGReg val,
- TCGReg base, MemOp opc, bool is_64)
+ TCGReg base, MemOp opc, TCGType type)
{
/* Byte swapping is left to middle-end expansion. */
tcg_debug_assert((opc & MO_BSWAP) == 0);
@@ -1120,7 +1120,7 @@ static void tcg_out_qemu_ld_direct(TCGContext *s, TCGReg
val,
tcg_out_opc_imm(s, OPC_LH, val, base, 0);
break;
case MO_UL:
- if (is_64) {
+ if (type == TCG_TYPE_I64) {
tcg_out_opc_imm(s, OPC_LWU, val, base, 0);
break;
}
@@ -1136,11 +1136,10 @@ static void tcg_out_qemu_ld_direct(TCGContext *s,
TCGReg val,
}
}
-static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, bool is_64)
+static void tcg_out_qemu_ld(TCGContext *s, TCGReg data_reg, TCGReg addr_reg,
+ MemOpIdx oi, TCGType d_type)
{
- TCGReg addr_reg, data_reg;
- MemOpIdx oi;
- MemOp opc;
+ MemOp opc = get_memop(oi);
#if defined(CONFIG_SOFTMMU)
tcg_insn_unit *label_ptr[1];
#else
@@ -1148,16 +1147,11 @@ static void tcg_out_qemu_ld(TCGContext *s, const TCGArg
*args, bool is_64)
#endif
TCGReg base;
- data_reg = *args++;
- addr_reg = *args++;
- oi = *args++;
- opc = get_memop(oi);
-
#if defined(CONFIG_SOFTMMU)
base = tcg_out_tlb_load(s, addr_reg, oi, label_ptr, 1);
- tcg_out_qemu_ld_direct(s, data_reg, base, opc, is_64);
- add_qemu_ldst_label(s, 1, oi, (is_64 ? TCG_TYPE_I64 : TCG_TYPE_I32),
- data_reg, addr_reg, s->code_ptr, label_ptr);
+ tcg_out_qemu_ld_direct(s, data_reg, base, opc, d_type);
+ add_qemu_ldst_label(s, true, oi, d_type, data_reg, addr_reg,
+ s->code_ptr, label_ptr);
#else
a_bits = get_alignment_bits(opc);
if (a_bits) {
@@ -1172,7 +1166,7 @@ static void tcg_out_qemu_ld(TCGContext *s, const TCGArg
*args, bool is_64)
tcg_out_opc_reg(s, OPC_ADD, TCG_REG_TMP0, TCG_GUEST_BASE_REG, base);
base = TCG_REG_TMP0;
}
- tcg_out_qemu_ld_direct(s, data_reg, base, opc, is_64);
+ tcg_out_qemu_ld_direct(s, data_reg, base, opc, d_type);
#endif
}
@@ -1200,11 +1194,10 @@ static void tcg_out_qemu_st_direct(TCGContext *s,
TCGReg val,
}
}
-static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, bool is_64)
+static void tcg_out_qemu_st(TCGContext *s, TCGReg data_reg, TCGReg addr_reg,
+ MemOpIdx oi, TCGType d_type)
{
- TCGReg addr_reg, data_reg;
- MemOpIdx oi;
- MemOp opc;
+ MemOp opc = get_memop(oi);
#if defined(CONFIG_SOFTMMU)
tcg_insn_unit *label_ptr[1];
#else
@@ -1212,16 +1205,12 @@ static void tcg_out_qemu_st(TCGContext *s, const TCGArg
*args, bool is_64)
#endif
TCGReg base;
- data_reg = *args++;
- addr_reg = *args++;
- oi = *args++;
- opc = get_memop(oi);
#if defined(CONFIG_SOFTMMU)
base = tcg_out_tlb_load(s, addr_reg, oi, label_ptr, 0);
tcg_out_qemu_st_direct(s, data_reg, base, opc);
- add_qemu_ldst_label(s, 0, oi, (is_64 ? TCG_TYPE_I64 : TCG_TYPE_I32),
- data_reg, addr_reg, s->code_ptr, label_ptr);
+ add_qemu_ldst_label(s, false, oi, d_type, data_reg, addr_reg,
+ s->code_ptr, label_ptr);
#else
a_bits = get_alignment_bits(opc);
if (a_bits) {
@@ -1528,16 +1517,16 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
break;
case INDEX_op_qemu_ld_i32:
- tcg_out_qemu_ld(s, args, false);
+ tcg_out_qemu_ld(s, a0, a1, a2, TCG_TYPE_I32);
break;
case INDEX_op_qemu_ld_i64:
- tcg_out_qemu_ld(s, args, true);
+ tcg_out_qemu_ld(s, a0, a1, a2, TCG_TYPE_I64);
break;
case INDEX_op_qemu_st_i32:
- tcg_out_qemu_st(s, args, false);
+ tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I32);
break;
case INDEX_op_qemu_st_i64:
- tcg_out_qemu_st(s, args, true);
+ tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I64);
break;
case INDEX_op_extrh_i64_i32:
--
2.34.1
- [PATCH 21/42] tcg/aarch64: Rename ext to d_type in tcg_out_qemu_ld, (continued)
- [PATCH 21/42] tcg/aarch64: Rename ext to d_type in tcg_out_qemu_ld, Richard Henderson, 2023/04/07
- [PATCH 22/42] tcg/aarch64: Pass TCGType to tcg_out_qemu_st, Richard Henderson, 2023/04/07
- [PATCH 23/42] tcg/arm: Use TCGType not bool is_64 in tcg_out_qemu_{ld, st}, Richard Henderson, 2023/04/07
- [PATCH 24/42] tcg/i386: Use TCGType not bool is_64 in tcg_out_qemu_{ld, st}, Richard Henderson, 2023/04/07
- [PATCH 25/42] tcg/ppc: Use TCGType not bool is_64 in tcg_out_qemu_{ld, st}, Richard Henderson, 2023/04/07
- [PATCH 26/42] tcg/s390x: Pass TCGType to tcg_out_qemu_{ld,st}, Richard Henderson, 2023/04/07
- [PATCH 28/42] tcg/riscv: Expand arguments to tcg_out_qemu_{ld,st},
Richard Henderson <=
- [PATCH 27/42] tcg/riscv: Require TCG_TARGET_REG_BITS == 64, Richard Henderson, 2023/04/07
- [PATCH 29/42] tcg: Move TCGLabelQemuLdst to tcg.c, Richard Henderson, 2023/04/07
- [PATCH 30/42] tcg: Introduce tcg_out_ld_helper_args, Richard Henderson, 2023/04/07
- [PATCH 31/42] tcg: Introduce tcg_out_st_helper_args, Richard Henderson, 2023/04/07
- [PATCH 32/42] tcg/loongarch64: Simplify constraints on qemu_ld/st, Richard Henderson, 2023/04/07
- [PATCH 34/42] tcg/mips: Simplify constraints on qemu_ld/st, Richard Henderson, 2023/04/07
- [PATCH 37/42] tcg/ppc: Remove unused constraints A, B, C, D, Richard Henderson, 2023/04/07
- [PATCH 36/42] tcg/ppc: Adjust constraints on qemu_ld/st, Richard Henderson, 2023/04/07
- [PATCH 38/42] tcg/riscv: Simplify constraints on qemu_ld/st, Richard Henderson, 2023/04/07
- [PATCH 35/42] tcg/ppc: Reorg tcg_out_tlb_read, Richard Henderson, 2023/04/07