[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
[PATCH v2 02/35] tcg/optimize: Split out arg_is_const_val
From: Richard Henderson
Subject: [PATCH v2 02/35] tcg/optimize: Split out arg_is_const_val
Date: Sat, 28 Oct 2023 12:44:49 -0700
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
tcg/optimize.c | 38 +++++++++++++++++++++++---------------
1 file changed, 23 insertions(+), 15 deletions(-)
diff --git a/tcg/optimize.c b/tcg/optimize.c
index 2db5177c32..e8a13fedb5 100644
--- a/tcg/optimize.c
+++ b/tcg/optimize.c
@@ -112,11 +112,22 @@ static inline bool ts_is_const(TCGTemp *ts)
return ts_info(ts)->is_const;
}
+static inline bool ts_is_const_val(TCGTemp *ts, uint64_t val)
+{
+ TempOptInfo *ti = ts_info(ts);
+ return ti->is_const && ti->val == val;
+}
+
static inline bool arg_is_const(TCGArg arg)
{
return ts_is_const(arg_temp(arg));
}
+static inline bool arg_is_const_val(TCGArg arg, uint64_t val)
+{
+ return ts_is_const_val(arg_temp(arg), val);
+}
+
static inline bool ts_is_copy(TCGTemp *ts)
{
return ts_info(ts)->next_copy != ts;
@@ -565,7 +576,7 @@ static int do_constant_folding_cond(TCGType type, TCGArg x,
}
} else if (args_are_copies(x, y)) {
return do_constant_folding_cond_eq(c);
- } else if (arg_is_const(y) && arg_info(y)->val == 0) {
+ } else if (arg_is_const_val(y, 0)) {
switch (c) {
case TCG_COND_LTU:
return 0;
@@ -831,7 +842,7 @@ static bool fold_to_not(OptContext *ctx, TCGOp *op, int idx)
/* If the binary operation has first argument @i, fold to @i. */
static bool fold_ix_to_i(OptContext *ctx, TCGOp *op, uint64_t i)
{
- if (arg_is_const(op->args[1]) && arg_info(op->args[1])->val == i) {
+ if (arg_is_const_val(op->args[1], i)) {
return tcg_opt_gen_movi(ctx, op, op->args[0], i);
}
return false;
@@ -840,7 +851,7 @@ static bool fold_ix_to_i(OptContext *ctx, TCGOp *op,
uint64_t i)
/* If the binary operation has first argument @i, fold to NOT. */
static bool fold_ix_to_not(OptContext *ctx, TCGOp *op, uint64_t i)
{
- if (arg_is_const(op->args[1]) && arg_info(op->args[1])->val == i) {
+ if (arg_is_const_val(op->args[1], i)) {
return fold_to_not(ctx, op, 2);
}
return false;
@@ -849,7 +860,7 @@ static bool fold_ix_to_not(OptContext *ctx, TCGOp *op,
uint64_t i)
/* If the binary operation has second argument @i, fold to @i. */
static bool fold_xi_to_i(OptContext *ctx, TCGOp *op, uint64_t i)
{
- if (arg_is_const(op->args[2]) && arg_info(op->args[2])->val == i) {
+ if (arg_is_const_val(op->args[2], i)) {
return tcg_opt_gen_movi(ctx, op, op->args[0], i);
}
return false;
@@ -858,7 +869,7 @@ static bool fold_xi_to_i(OptContext *ctx, TCGOp *op,
uint64_t i)
/* If the binary operation has second argument @i, fold to identity. */
static bool fold_xi_to_x(OptContext *ctx, TCGOp *op, uint64_t i)
{
- if (arg_is_const(op->args[2]) && arg_info(op->args[2])->val == i) {
+ if (arg_is_const_val(op->args[2], i)) {
return tcg_opt_gen_mov(ctx, op, op->args[0], op->args[1]);
}
return false;
@@ -867,7 +878,7 @@ static bool fold_xi_to_x(OptContext *ctx, TCGOp *op,
uint64_t i)
/* If the binary operation has second argument @i, fold to NOT. */
static bool fold_xi_to_not(OptContext *ctx, TCGOp *op, uint64_t i)
{
- if (arg_is_const(op->args[2]) && arg_info(op->args[2])->val == i) {
+ if (arg_is_const_val(op->args[2], i)) {
return fold_to_not(ctx, op, 1);
}
return false;
@@ -1083,8 +1094,8 @@ static bool fold_brcond2(OptContext *ctx, TCGOp *op)
* Simplify LT/GE comparisons vs zero to a single compare
* vs the high word of the input.
*/
- if (arg_is_const(op->args[2]) && arg_info(op->args[2])->val == 0 &&
- arg_is_const(op->args[3]) && arg_info(op->args[3])->val == 0) {
+ if (arg_is_const_val(op->args[2], 0) &&
+ arg_is_const_val(op->args[3], 0)) {
goto do_brcond_high;
}
break;
@@ -1303,9 +1314,7 @@ static bool fold_deposit(OptContext *ctx, TCGOp *op)
}
/* Inserting a value into zero at offset 0. */
- if (arg_is_const(op->args[1])
- && arg_info(op->args[1])->val == 0
- && op->args[3] == 0) {
+ if (arg_is_const_val(op->args[1], 0) && op->args[3] == 0) {
uint64_t mask = MAKE_64BIT_MASK(0, op->args[4]);
op->opc = and_opc;
@@ -1316,8 +1325,7 @@ static bool fold_deposit(OptContext *ctx, TCGOp *op)
}
/* Inserting zero into a value. */
- if (arg_is_const(op->args[2])
- && arg_info(op->args[2])->val == 0) {
+ if (arg_is_const_val(op->args[2], 0)) {
uint64_t mask = deposit64(-1, op->args[3], op->args[4], 0);
op->opc = and_opc;
@@ -1855,8 +1863,8 @@ static bool fold_setcond2(OptContext *ctx, TCGOp *op)
* Simplify LT/GE comparisons vs zero to a single compare
* vs the high word of the input.
*/
- if (arg_is_const(op->args[3]) && arg_info(op->args[3])->val == 0 &&
- arg_is_const(op->args[4]) && arg_info(op->args[4])->val == 0) {
+ if (arg_is_const_val(op->args[3], 0) &&
+ arg_is_const_val(op->args[4], 0)) {
goto do_setcond_high;
}
break;
--
2.34.1
- [PATCH v2 00/35] tcg: Introduce TCG_COND_TST{EQ,NE}, Richard Henderson, 2023/10/28
- [PATCH v2 01/35] tcg: Introduce TCG_COND_TST{EQ,NE}, Richard Henderson, 2023/10/28
- [PATCH v2 02/35] tcg/optimize: Split out arg_is_const_val, Richard Henderson <=
- [PATCH v2 04/35] tcg/optimize: Do swap_commutative2 in do_constant_folding_cond2, Richard Henderson, 2023/10/28
- [PATCH v2 03/35] tcg/optimize: Split out do_constant_folding_cond1, Richard Henderson, 2023/10/28
- [PATCH v2 05/35] tcg/optimize: Split out arg_new_constant, Richard Henderson, 2023/10/28
- [PATCH v2 07/35] tcg: Add TCGConst argument to tcg_target_const_match, Richard Henderson, 2023/10/28
- [PATCH v2 09/35] tcg/aarch64: Generate TBZ, TBNZ, Richard Henderson, 2023/10/28
- [PATCH v2 08/35] tcg/aarch64: Support TCG_COND_TST{EQ,NE}, Richard Henderson, 2023/10/28
- [PATCH v2 10/35] tcg/aarch64: Generate CBNZ for TSTNE of UINT32_MAX, Richard Henderson, 2023/10/28
- [PATCH v2 06/35] tcg/optimize: Handle TCG_COND_TST{EQ,NE}, Richard Henderson, 2023/10/28
- [PATCH v2 24/35] tcg/ppc: Use cr0 in tcg_to_bc and tcg_to_isel, Richard Henderson, 2023/10/28
- [PATCH v2 16/35] tcg/loongarch64: Support TCG_COND_TST{EQ,NE}, Richard Henderson, 2023/10/28