[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
[PULL 11/39] target/alpha: Use TCG_COND_TSTNE for gen_fold_mzero
From: Richard Henderson
Subject: [PULL 11/39] target/alpha: Use TCG_COND_TSTNE for gen_fold_mzero
Date: Mon, 5 Feb 2024 07:40:24 +1000
Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
target/alpha/translate.c | 49 +++++++++++++++++++---------------------
1 file changed, 23 insertions(+), 26 deletions(-)
diff --git a/target/alpha/translate.c b/target/alpha/translate.c
index 220eda2137..882cf6cea0 100644
--- a/target/alpha/translate.c
+++ b/target/alpha/translate.c
@@ -490,56 +490,53 @@ static DisasJumpType gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
/* Fold -0.0 for comparison with COND. */
-static void gen_fold_mzero(TCGCond cond, TCGv dest, TCGv src)
+static TCGv_i64 gen_fold_mzero(TCGCond *pcond, uint64_t *pimm, TCGv_i64 src)
{
- uint64_t mzero = 1ull << 63;
+ TCGv_i64 tmp;
- switch (cond) {
+ *pimm = 0;
+ switch (*pcond) {
case TCG_COND_LE:
case TCG_COND_GT:
/* For <= or >, the -0.0 value directly compares the way we want. */
- tcg_gen_mov_i64(dest, src);
- break;
+ return src;
case TCG_COND_EQ:
case TCG_COND_NE:
- /* For == or !=, we can simply mask off the sign bit and compare. */
- tcg_gen_andi_i64(dest, src, mzero - 1);
- break;
+ /* For == or !=, we can compare without the sign bit. */
+ *pcond = *pcond == TCG_COND_EQ ? TCG_COND_TSTEQ : TCG_COND_TSTNE;
+ *pimm = INT64_MAX;
+ return src;
case TCG_COND_GE:
case TCG_COND_LT:
/* For >= or <, map -0.0 to +0.0. */
- tcg_gen_movcond_i64(TCG_COND_NE, dest, src, tcg_constant_i64(mzero),
- src, tcg_constant_i64(0));
- break;
+ tmp = tcg_temp_new_i64();
+ tcg_gen_movcond_i64(TCG_COND_EQ, tmp,
+ src, tcg_constant_i64(INT64_MIN),
+ tcg_constant_i64(0), src);
+ return tmp;
default:
- abort();
+ g_assert_not_reached();
}
}
static DisasJumpType gen_fbcond(DisasContext *ctx, TCGCond cond, int ra,
int32_t disp)
{
- TCGv cmp_tmp = tcg_temp_new();
- DisasJumpType ret;
-
- gen_fold_mzero(cond, cmp_tmp, load_fpr(ctx, ra));
- ret = gen_bcond_internal(ctx, cond, cmp_tmp, 0, disp);
- return ret;
+ uint64_t imm;
+ TCGv_i64 tmp = gen_fold_mzero(&cond, &imm, load_fpr(ctx, ra));
+ return gen_bcond_internal(ctx, cond, tmp, imm, disp);
}
static void gen_fcmov(DisasContext *ctx, TCGCond cond, int ra, int rb, int rc)
{
- TCGv_i64 va, vb, z;
-
- z = load_zero(ctx);
- vb = load_fpr(ctx, rb);
- va = tcg_temp_new();
- gen_fold_mzero(cond, va, load_fpr(ctx, ra));
-
- tcg_gen_movcond_i64(cond, dest_fpr(ctx, rc), va, z, vb, load_fpr(ctx, rc));
+ uint64_t imm;
+ TCGv_i64 tmp = gen_fold_mzero(&cond, &imm, load_fpr(ctx, ra));
+ tcg_gen_movcond_i64(cond, dest_fpr(ctx, rc),
+ tmp, tcg_constant_i64(imm),
+ load_fpr(ctx, rb), load_fpr(ctx, rc));
}
#define QUAL_RM_N 0x080 /* Round mode nearest even */
--
2.34.1
- [PULL 00/39] tcg patch queue, Richard Henderson, 2024/02/04
- [PULL 01/39] tcg: Introduce TCG_COND_TST{EQ,NE}, Richard Henderson, 2024/02/04
- [PULL 02/39] tcg: Introduce TCG_TARGET_HAS_tst, Richard Henderson, 2024/02/04
- [PULL 03/39] tcg/optimize: Split out arg_is_const_val, Richard Henderson, 2024/02/04
- [PULL 06/39] tcg/optimize: Handle TCG_COND_TST{EQ,NE}, Richard Henderson, 2024/02/04
- [PULL 05/39] tcg/optimize: Do swap_commutative2 in do_constant_folding_cond2, Richard Henderson, 2024/02/04
- [PULL 04/39] tcg/optimize: Split out do_constant_folding_cond1, Richard Henderson, 2024/02/04
- [PULL 07/39] tcg/optimize: Lower TCG_COND_TST{EQ,NE} if unsupported, Richard Henderson, 2024/02/04
- [PULL 08/39] target/alpha: Pass immediate value to gen_bcond_internal(), Richard Henderson, 2024/02/04
- [PULL 09/39] target/alpha: Use TCG_COND_TST{EQ,NE} for BLB{C,S}, Richard Henderson, 2024/02/04
- [PULL 11/39] target/alpha: Use TCG_COND_TSTNE for gen_fold_mzero, Richard Henderson <=
- [PULL 10/39] target/alpha: Use TCG_COND_TST{EQ,NE} for CMOVLB{C,S}, Richard Henderson, 2024/02/04
- [PULL 12/39] target/m68k: Use TCG_COND_TST{EQ,NE} in gen_fcc_cond, Richard Henderson, 2024/02/04
- [PULL 13/39] target/sparc: Use TCG_COND_TSTEQ in gen_op_mulscc, Richard Henderson, 2024/02/04
- [PULL 14/39] target/s390x: Use TCG_COND_TSTNE for CC_OP_{TM,ICM}, Richard Henderson, 2024/02/04
- [PULL 16/39] tcg: Add TCGConst argument to tcg_target_const_match, Richard Henderson, 2024/02/04
- [PULL 17/39] tcg/aarch64: Support TCG_COND_TST{EQ,NE}, Richard Henderson, 2024/02/04
- [PULL 18/39] tcg/aarch64: Massage tcg_out_brcond(), Richard Henderson, 2024/02/04
- [PULL 19/39] tcg/aarch64: Generate TBZ, TBNZ, Richard Henderson, 2024/02/04
- [PULL 20/39] tcg/aarch64: Generate CBNZ for TSTNE of UINT32_MAX, Richard Henderson, 2024/02/04
- [PULL 21/39] tcg/arm: Split out tcg_out_cmp(), Richard Henderson, 2024/02/04