tcg/arm: Use full load/store helpers in user-only mode
Instead of using helper_unaligned_{ld,st}, use the full load/store helpers.
This will allow the fast path to increase alignment to implement atomicity
while not immediately raising an alignment exception.

Reviewed-by: Peter Maydell <peter.maydell@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
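
For readers less familiar with the TCG slow-path split, the following is a minimal, self-contained C sketch of the behavioural difference the commit message describes. It is not QEMU code: the names (slow_path_load_before, slow_path_load_after, guest_mem) are invented for illustration, and the real helpers operate on guest addresses through CPUArchState rather than raw host pointers.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/*
 * Illustration only, not QEMU code.  Models the old user-only slow path:
 * reaching it meant the access was misaligned and could only fault.
 */
static uint32_t slow_path_load_before(const uint8_t *addr)
{
    fprintf(stderr, "unaligned 4-byte load at %p: fault\n", (const void *)addr);
    abort();
}

/*
 * Models the full load helper: the slow path completes the misaligned
 * 32-bit load (a byte-wise copy is alignment-safe) instead of faulting.
 */
static uint32_t slow_path_load_after(const uint8_t *addr)
{
    uint32_t val;
    memcpy(&val, addr, sizeof(val));
    return val;
}

int main(void)
{
    uint8_t guest_mem[8] = { 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88 };
    const uint8_t *addr = guest_mem + 1;   /* deliberately misaligned */

    (void)slow_path_load_before;           /* kept only to show the old behaviour */

    /*
     * A fast path that insists on natural alignment (e.g. so the load is a
     * single atomic access) rejects this address and branches to the slow
     * path, which can now satisfy it.
     */
    if (((uintptr_t)addr & 3) != 0) {
        printf("slow path loaded 0x%08" PRIx32 "\n", slow_path_load_after(addr));
    }
    return 0;
}

The point of the change is that once the slow path can complete a misaligned access, the fast path is free to require stricter alignment than the guest asked for, e.g. to make the access atomic, without every such access turning into an alignment fault.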
rth7680 committed May 16, 2023
1 parent 8c72c74 commit ab85b80
Showing 1 changed file with 0 additions and 45 deletions.
tcg/arm/tcg-target.c.inc: 45 changes (0 additions, 45 deletions)
@@ -1325,7 +1325,6 @@ typedef struct {
     bool index_scratch;
 } HostAddress;
 
-#ifdef CONFIG_SOFTMMU
 static TCGReg ldst_ra_gen(TCGContext *s, const TCGLabelQemuLdst *l, int arg)
 {
     /* We arrive at the slow path via "BLNE", so R14 contains l->raddr. */
@@ -1368,50 +1367,6 @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)
     tcg_out_goto(s, COND_AL, qemu_st_helpers[opc & MO_SIZE]);
     return true;
 }
-#else
-static bool tcg_out_fail_alignment(TCGContext *s, TCGLabelQemuLdst *l)
-{
-    if (!reloc_pc24(l->label_ptr[0], tcg_splitwx_to_rx(s->code_ptr))) {
-        return false;
-    }
-
-    if (TARGET_LONG_BITS == 64) {
-        /* 64-bit target address is aligned into R2:R3. */
-        TCGMovExtend ext[2] = {
-            { .dst = TCG_REG_R2, .dst_type = TCG_TYPE_I32,
-              .src = l->addrlo_reg,
-              .src_type = TCG_TYPE_I32, .src_ext = MO_UL },
-            { .dst = TCG_REG_R3, .dst_type = TCG_TYPE_I32,
-              .src = l->addrhi_reg,
-              .src_type = TCG_TYPE_I32, .src_ext = MO_UL },
-        };
-        tcg_out_movext2(s, &ext[0], &ext[1], TCG_REG_TMP);
-    } else {
-        tcg_out_mov(s, TCG_TYPE_I32, TCG_REG_R1, l->addrlo_reg);
-    }
-    tcg_out_mov(s, TCG_TYPE_PTR, TCG_REG_R0, TCG_AREG0);
-
-    /*
-     * Tail call to the helper, with the return address back inline,
-     * just for the clarity of the debugging traceback -- the helper
-     * cannot return. We have used BLNE to arrive here, so LR is
-     * already set.
-     */
-    tcg_out_goto(s, COND_AL, (const void *)
-                 (l->is_ld ? helper_unaligned_ld : helper_unaligned_st));
-    return true;
-}
-
-static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
-{
-    return tcg_out_fail_alignment(s, l);
-}
-
-static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
-{
-    return tcg_out_fail_alignment(s, l);
-}
-#endif /* SOFTMMU */
 
 static TCGLabelQemuLdst *prepare_host_addr(TCGContext *s, HostAddress *h,
                                            TCGReg addrlo, TCGReg addrhi,
