[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
[PULL 23/72] tcg/optimize: Use fold_masks_zs in fold_exts
From: Richard Henderson
Subject: [PULL 23/72] tcg/optimize: Use fold_masks_zs in fold_exts
Date: Tue, 24 Dec 2024 12:04:32 -0800
Avoid the use of the OptContext slots. Find TempOptInfo once.
Explicitly sign-extend z_mask instead of doing that manually.
Reviewed-by: Pierrick Bouvier <pierrick.bouvier@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
tcg/optimize.c | 29 ++++++++++++-----------------
1 file changed, 12 insertions(+), 17 deletions(-)
diff --git a/tcg/optimize.c b/tcg/optimize.c
index 04ec6fdcef..3aafe039ed 100644
--- a/tcg/optimize.c
+++ b/tcg/optimize.c
@@ -1778,49 +1778,44 @@ static bool fold_extract2(OptContext *ctx, TCGOp *op)
static bool fold_exts(OptContext *ctx, TCGOp *op)
{
- uint64_t s_mask_old, s_mask, z_mask, sign;
+ uint64_t s_mask_old, s_mask, z_mask;
bool type_change = false;
+ TempOptInfo *t1;
if (fold_const1(ctx, op)) {
return true;
}
- z_mask = arg_info(op->args[1])->z_mask;
- s_mask = arg_info(op->args[1])->s_mask;
+ t1 = arg_info(op->args[1]);
+ z_mask = t1->z_mask;
+ s_mask = t1->s_mask;
s_mask_old = s_mask;
switch (op->opc) {
CASE_OP_32_64(ext8s):
- sign = INT8_MIN;
- z_mask = (uint8_t)z_mask;
+ s_mask |= INT8_MIN;
+ z_mask = (int8_t)z_mask;
break;
CASE_OP_32_64(ext16s):
- sign = INT16_MIN;
- z_mask = (uint16_t)z_mask;
+ s_mask |= INT16_MIN;
+ z_mask = (int16_t)z_mask;
break;
case INDEX_op_ext_i32_i64:
type_change = true;
QEMU_FALLTHROUGH;
case INDEX_op_ext32s_i64:
- sign = INT32_MIN;
- z_mask = (uint32_t)z_mask;
+ s_mask |= INT32_MIN;
+ z_mask = (int32_t)z_mask;
break;
default:
g_assert_not_reached();
}
- if (z_mask & sign) {
- z_mask |= sign;
- }
- s_mask |= sign << 1;
-
- ctx->z_mask = z_mask;
- ctx->s_mask = s_mask;
if (0 && !type_change && fold_affected_mask(ctx, op, s_mask &
~s_mask_old)) {
return true;
}
- return fold_masks(ctx, op);
+ return fold_masks_zs(ctx, op, z_mask, s_mask);
}
static bool fold_extu(OptContext *ctx, TCGOp *op)
--
2.43.0
- [PULL 21/72] tcg/optimize: Use fold_masks_z in fold_extract, (continued)
- [PULL 21/72] tcg/optimize: Use fold_masks_z in fold_extract, Richard Henderson, 2024/12/24
- [PULL 24/72] tcg/optimize: Use fold_masks_z in fold_extu, Richard Henderson, 2024/12/24
- [PULL 26/72] tcg/optimize: Use finish_folding in fold_mul*, Richard Henderson, 2024/12/24
- [PULL 29/72] tcg/optimize: Use fold_masks_s in fold_nor, Richard Henderson, 2024/12/24
- [PULL 30/72] tcg/optimize: Use fold_masks_s in fold_not, Richard Henderson, 2024/12/24
- [PULL 32/72] tcg/optimize: Use fold_masks_zs in fold_orc, Richard Henderson, 2024/12/24
- [PULL 31/72] tcg/optimize: Use fold_masks_zs in fold_or, Richard Henderson, 2024/12/24
- [PULL 34/72] tcg/optimize: Return true from fold_qemu_st, fold_tcg_st, Richard Henderson, 2024/12/24
- [PULL 35/72] tcg/optimize: Use finish_folding in fold_remainder, Richard Henderson, 2024/12/24
- [PULL 02/72] plugins: optimize cpu_index code generation, Richard Henderson, 2024/12/24
- [PULL 23/72] tcg/optimize: Use fold_masks_zs in fold_exts,
Richard Henderson <=
- [PULL 38/72] tcg/optimize: Use fold_masks_s in fold_negsetcond, Richard Henderson, 2024/12/24
- [PULL 39/72] tcg/optimize: Use fold_masks_z in fold_setcond2, Richard Henderson, 2024/12/24
- [PULL 11/72] tcg/optimize: Use fold_masks_zs in fold_and, Richard Henderson, 2024/12/24
- [PULL 33/72] tcg/optimize: Use fold_masks_zs in fold_qemu_ld, Richard Henderson, 2024/12/24
- [PULL 20/72] tcg/optimize: Use fold_masks_s in fold_eqv, Richard Henderson, 2024/12/24
- [PULL 22/72] tcg/optimize: Use finish_folding in fold_extract2, Richard Henderson, 2024/12/24
- [PULL 28/72] tcg/optimize: Use fold_masks_z in fold_neg_no_const, Richard Henderson, 2024/12/24
- [PULL 27/72] tcg/optimize: Use fold_masks_s in fold_nand, Richard Henderson, 2024/12/24
- [PULL 36/72] tcg/optimize: Distinguish simplification in fold_setcond_zmask, Richard Henderson, 2024/12/24
- [PULL 37/72] tcg/optimize: Use fold_masks_z in fold_setcond, Richard Henderson, 2024/12/24