tcg/loongarch64: Rationalize args to tcg_out_qemu_{ld,st}
Interpret the variable argument placement in the caller.  Shift some
code around slightly to share more between softmmu and user-only.

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
rth7680 committed May 5, 2023
1 parent 1df6d61 commit 7f67e58
Showing 1 changed file with 40 additions and 56 deletions.
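In short, the two helpers stop consuming the raw TCGArg array and instead take the data register, address register and MemOpIdx as explicit parameters; the dispatcher in tcg_out_op (last hunk below) now does the unpacking, passing a0, a1 and a2, i.e. the first three entries of that array. As an aside for readers outside QEMU, here is a minimal standalone sketch of the same caller-interprets-the-arguments pattern, using toy stand-in types rather than the real TCG definitions:

#include <stdint.h>
#include <stdio.h>

/* Toy stand-ins for illustration only; not the QEMU definitions. */
typedef uint64_t TCGArg;
typedef int      TCGReg;
typedef uint32_t MemOpIdx;

/* Old shape: the helper pulls its operands out of the argument array itself. */
static void emit_ld_old(const TCGArg *args)
{
    TCGReg data_reg = (TCGReg)*args++;
    TCGReg addr_reg = (TCGReg)*args++;
    MemOpIdx oi     = (MemOpIdx)*args++;
    printf("old: data=r%d addr=r%d oi=%u\n", data_reg, addr_reg, (unsigned)oi);
}

/* New shape: the caller interprets the argument placement and passes
 * explicit operands, so the helper body no longer touches the array. */
static void emit_ld_new(TCGReg data_reg, TCGReg addr_reg, MemOpIdx oi)
{
    printf("new: data=r%d addr=r%d oi=%u\n", data_reg, addr_reg, (unsigned)oi);
}

int main(void)
{
    TCGArg args[3] = { 4, 5, 17 };   /* a0 = data reg, a1 = addr reg, a2 = oi */

    emit_ld_old(args);
    emit_ld_new((TCGReg)args[0], (TCGReg)args[1], (MemOpIdx)args[2]);
    return 0;
}

Both calls emit the same operands; the refactoring only moves the interpretation of the argument array into the caller, and the rest of the patch then hoists the shared zero-extension and indexed-access code out of the #ifdef blocks.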
tcg/loongarch64/tcg-target.c.inc: 96 changes (40 additions & 56 deletions)
@@ -1049,39 +1049,31 @@ static void tcg_out_qemu_ld_indexed(TCGContext *s, TCGReg rd, TCGReg rj,
     }
 }
 
-static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, TCGType type)
+static void tcg_out_qemu_ld(TCGContext *s, TCGReg data_reg, TCGReg addr_reg,
+                            MemOpIdx oi, TCGType data_type)
 {
-    TCGReg addr_regl;
-    TCGReg data_regl;
-    MemOpIdx oi;
-    MemOp opc;
-#if defined(CONFIG_SOFTMMU)
+    MemOp opc = get_memop(oi);
+    TCGReg base, index;
+
+#ifdef CONFIG_SOFTMMU
     tcg_insn_unit *label_ptr[1];
+
+    tcg_out_tlb_load(s, addr_reg, oi, label_ptr, 1);
+    index = TCG_REG_TMP2;
 #else
-    unsigned a_bits;
+    unsigned a_bits = get_alignment_bits(opc);
+    if (a_bits) {
+        tcg_out_test_alignment(s, true, addr_reg, a_bits);
+    }
+    index = USE_GUEST_BASE ? TCG_GUEST_BASE_REG : TCG_REG_ZERO;
 #endif
-    TCGReg base;
 
-    data_regl = *args++;
-    addr_regl = *args++;
-    oi = *args++;
-    opc = get_memop(oi);
+    base = tcg_out_zext_addr_if_32_bit(s, addr_reg, TCG_REG_TMP0);
+    tcg_out_qemu_ld_indexed(s, data_reg, base, index, opc, data_type);
 
-#if defined(CONFIG_SOFTMMU)
-    tcg_out_tlb_load(s, addr_regl, oi, label_ptr, 1);
-    base = tcg_out_zext_addr_if_32_bit(s, addr_regl, TCG_REG_TMP0);
-    tcg_out_qemu_ld_indexed(s, data_regl, base, TCG_REG_TMP2, opc, type);
-    add_qemu_ldst_label(s, 1, oi, type,
-                        data_regl, addr_regl,
+#ifdef CONFIG_SOFTMMU
+    add_qemu_ldst_label(s, true, oi, data_type, data_reg, addr_reg,
                         s->code_ptr, label_ptr);
-#else
-    a_bits = get_alignment_bits(opc);
-    if (a_bits) {
-        tcg_out_test_alignment(s, true, addr_regl, a_bits);
-    }
-    base = tcg_out_zext_addr_if_32_bit(s, addr_regl, TCG_REG_TMP0);
-    TCGReg guest_base_reg = USE_GUEST_BASE ? TCG_GUEST_BASE_REG : TCG_REG_ZERO;
-    tcg_out_qemu_ld_indexed(s, data_regl, base, guest_base_reg, opc, type);
 #endif
 }

@@ -1109,39 +1101,31 @@ static void tcg_out_qemu_st_indexed(TCGContext *s, TCGReg data,
     }
 }
 
-static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, TCGType type)
+static void tcg_out_qemu_st(TCGContext *s, TCGReg data_reg, TCGReg addr_reg,
+                            MemOpIdx oi, TCGType data_type)
 {
-    TCGReg addr_regl;
-    TCGReg data_regl;
-    MemOpIdx oi;
-    MemOp opc;
-#if defined(CONFIG_SOFTMMU)
+    MemOp opc = get_memop(oi);
+    TCGReg base, index;
+
+#ifdef CONFIG_SOFTMMU
    tcg_insn_unit *label_ptr[1];
+
+    tcg_out_tlb_load(s, addr_reg, oi, label_ptr, 0);
+    index = TCG_REG_TMP2;
 #else
-    unsigned a_bits;
+    unsigned a_bits = get_alignment_bits(opc);
+    if (a_bits) {
+        tcg_out_test_alignment(s, false, addr_reg, a_bits);
+    }
+    index = USE_GUEST_BASE ? TCG_GUEST_BASE_REG : TCG_REG_ZERO;
 #endif
-    TCGReg base;
 
-    data_regl = *args++;
-    addr_regl = *args++;
-    oi = *args++;
-    opc = get_memop(oi);
+    base = tcg_out_zext_addr_if_32_bit(s, addr_reg, TCG_REG_TMP0);
+    tcg_out_qemu_st_indexed(s, data_reg, base, index, opc);
 
-#if defined(CONFIG_SOFTMMU)
-    tcg_out_tlb_load(s, addr_regl, oi, label_ptr, 0);
-    base = tcg_out_zext_addr_if_32_bit(s, addr_regl, TCG_REG_TMP0);
-    tcg_out_qemu_st_indexed(s, data_regl, base, TCG_REG_TMP2, opc);
-    add_qemu_ldst_label(s, 0, oi, type,
-                        data_regl, addr_regl,
+#ifdef CONFIG_SOFTMMU
+    add_qemu_ldst_label(s, false, oi, data_type, data_reg, addr_reg,
                         s->code_ptr, label_ptr);
-#else
-    a_bits = get_alignment_bits(opc);
-    if (a_bits) {
-        tcg_out_test_alignment(s, false, addr_regl, a_bits);
-    }
-    base = tcg_out_zext_addr_if_32_bit(s, addr_regl, TCG_REG_TMP0);
-    TCGReg guest_base_reg = USE_GUEST_BASE ? TCG_GUEST_BASE_REG : TCG_REG_ZERO;
-    tcg_out_qemu_st_indexed(s, data_regl, base, guest_base_reg, opc);
 #endif
 }

@@ -1564,16 +1548,16 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         break;
 
     case INDEX_op_qemu_ld_i32:
-        tcg_out_qemu_ld(s, args, TCG_TYPE_I32);
+        tcg_out_qemu_ld(s, a0, a1, a2, TCG_TYPE_I32);
         break;
     case INDEX_op_qemu_ld_i64:
-        tcg_out_qemu_ld(s, args, TCG_TYPE_I64);
+        tcg_out_qemu_ld(s, a0, a1, a2, TCG_TYPE_I64);
         break;
     case INDEX_op_qemu_st_i32:
-        tcg_out_qemu_st(s, args, TCG_TYPE_I32);
+        tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I32);
         break;
     case INDEX_op_qemu_st_i64:
-        tcg_out_qemu_st(s, args, TCG_TYPE_I64);
+        tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I64);
         break;
 
     case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
