static bool trans_fsgnj_s(DisasContext *ctx, arg_fsgnj_s *a)
{
REQUIRE_FPU;
- REQUIRE_EXT(ctx, RVF);
+ REQUIRE_ZFINX_OR_F(ctx);
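+
+ /*
+  * With Zfinx, single-precision values live in the integer registers
+  * rather than in cpu_fpr[], so the sources and the destination are
+  * accessed through get_fpr_hs()/dest_fpr() instead of cpu_fpr[].
+  */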
+ TCGv_i64 dest = dest_fpr(ctx, a->rd);
if (a->rs1 == a->rs2) { /* FMOV */
- gen_check_nanbox_s(cpu_fpr[a->rd], cpu_fpr[a->rs1]);
+ TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
+ if (ctx->ext_zfinx) {
+ gen_nanbox_s(dest, src1);
+ } else {
+ gen_check_nanbox_s(dest, src1);
+ }
} else { /* FSGNJ */
- TCGv_i64 rs1 = tcg_temp_new_i64();
- TCGv_i64 rs2 = tcg_temp_new_i64();
-
- gen_check_nanbox_s(rs1, cpu_fpr[a->rs1]);
- gen_check_nanbox_s(rs2, cpu_fpr[a->rs2]);
-
- /* This formulation retains the nanboxing of rs2. */
- tcg_gen_deposit_i64(cpu_fpr[a->rd], rs2, rs1, 0, 31);
- tcg_temp_free_i64(rs1);
- tcg_temp_free_i64(rs2);
+ TCGv_i64 rs1, rs2;
+ if (!ctx->ext_zfinx) {
+ TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
+ TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
+ rs1 = tcg_temp_new_i64();
+ rs2 = tcg_temp_new_i64();
+ gen_check_nanbox_s(rs1, src1);
+ gen_check_nanbox_s(rs2, src2);
+ } else {
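+ /* Zfinx operands are plain 32-bit values in x registers; no NaN-box check is needed. */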
+ rs1 = get_fpr_hs(ctx, a->rs1);
+ rs2 = get_fpr_hs(ctx, a->rs2);
+ }
+
+ /* This formulation retains the nanboxing of rs2 in normal 'F'. */
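+ /* Bits [30:0] (the magnitude) come from rs1; bit 31 (the sign) and the upper half come from rs2. */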
+ tcg_gen_deposit_i64(dest, rs2, rs1, 0, 31);
+ if (!ctx->ext_zfinx) {
+ tcg_temp_free_i64(rs1);
+ tcg_temp_free_i64(rs2);
+ } else {
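+ /* rs2 was read from an x register, so the upper half of dest is not a NaN box; box the result here. */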
+ gen_nanbox_s(dest, dest);
+ }
}
+ gen_set_fpr_hs(ctx, a->rd, dest);
mark_fs_dirty(ctx);
return true;
}
diff --git a/target/riscv/internals.h b/target/riscv/internals.h
index 065e8162a2..9f3f3319f2 100644
--- a/target/riscv/internals.h
+++ b/target/riscv/internals.h
@@ -51,8 +51,12 @@ static inline uint64_t nanbox_s(float32 f)
return f | MAKE_64BIT_MASK(32, 32);
}
-static inline float32 check_nanbox_s(uint64_t f)
+static inline float32 check_nanbox_s(CPURISCVState *env, uint64_t f)
{
+ /*
+  * Skip the NaN-box check when Zfinx is enabled: the value comes from
+  * an integer register and is never NaN-boxed.
+  */
+ if (RISCV_CPU(env_cpu(env))->cfg.ext_zfinx) {
+ return (uint32_t)f;
+ }
+