soc: qcom: scm: Fix scm_call_count when used with LSE atomics
LSE atomic increments and decrements clobber the x0 and x1 registers,
and since these registers are used in volatile inline assembly for SCM
calls, GCC does not preserve their values across the atomic_inc() and
atomic_dec() calls. This results in x0 and x1 containing garbage values
before and after the SCM call, breaking it entirely.

Moving the atomic_inc() and atomic_dec() out of the SCM call functions
and into thin wrapper functions fixes the issue.

Signed-off-by: Sultan Alsawaf <sultan@kerneltoast.com>
[lazerl0rd: Adjust title to reflect that this change affects LLVM/Clang
            too.]
Signed-off-by: Diab Neiroukh <lazerl0rd@thezest.dev>
Signed-off-by: dreamisbaka <jolinux.g@gmail.com>
kerneltoast authored and acuicultor committed May 14, 2021
1 parent 9de2edc commit 520bd16
Showing 1 changed file with 38 additions and 11 deletions.
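
For context before the diff: a minimal, self-contained sketch of the hazard and of the wrapper pattern the patch adopts. The function names (scm_call_broken, __scm_call_inner, scm_call_fixed) and the bare "smc #0" asm are illustrative stand-ins rather than the driver's actual code; scm_call_count is redefined here so the sketch stands alone, and the claim that the LSE atomics clobber x0/x1 is taken from the commit message above.

#include <linux/atomic.h>
#include <linux/types.h>

static atomic_t scm_call_count = ATOMIC_INIT(0);

/*
 * Hazardous pattern: an atomic op sits between the register-variable
 * setup and the inline asm. Per the commit message, LSE atomic_inc()
 * can clobber x0/x1, and the compiler only guarantees that local
 * register variables hold their values at the asm statement that uses
 * them, so r0/r1 may contain garbage when the SMC executes.
 */
static int scm_call_broken(u64 a0, u64 a1)
{
	register u64 r0 asm("x0") = a0;
	register u64 r1 asm("x1") = a1;

	atomic_inc(&scm_call_count);	/* may clobber x0/x1 */
	asm volatile("smc	#0"
		     : "+r" (r0), "+r" (r1)
		     :
		     : "x2", "x3", "memory");
	atomic_dec(&scm_call_count);

	return r0;
}

/*
 * Fixed pattern: the register variables and the asm live alone in one
 * function, and the reference counting moves into a thin wrapper, so
 * nothing can clobber x0/x1 between their setup and the SMC.
 */
static int __scm_call_inner(u64 a0, u64 a1)
{
	register u64 r0 asm("x0") = a0;
	register u64 r1 asm("x1") = a1;

	asm volatile("smc	#0"
		     : "+r" (r0), "+r" (r1)
		     :
		     : "x2", "x3", "memory");

	return r0;
}

static int scm_call_fixed(u64 a0, u64 a1)
{
	int ret;

	atomic_inc(&scm_call_count);
	ret = __scm_call_inner(a0, a1);
	atomic_dec(&scm_call_count);

	return ret;
}

Because the register setup and the asm statement sit alone in the inner function, nothing can be scheduled between them, which is exactly the split the diff below applies to __scm_call_armv8_64() and __scm_call_armv8_32().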
49 changes: 38 additions & 11 deletions drivers/soc/qcom/scm.c
@@ -95,7 +95,7 @@ static int scm_remap_error(int err)

#ifdef CONFIG_ARM64

static int __scm_call_armv8_64(u64 x0, u64 x1, u64 x2, u64 x3, u64 x4, u64 x5,
static int ___scm_call_armv8_64(u64 x0, u64 x1, u64 x2, u64 x3, u64 x4, u64 x5,
u64 *ret1, u64 *ret2, u64 *ret3)
{
register u64 r0 asm("x0") = x0;
@@ -106,7 +106,6 @@ static int __scm_call_armv8_64(u64 x0, u64 x1, u64 x2, u64 x3, u64 x4, u64 x5,
register u64 r5 asm("x5") = x5;
register u64 r6 asm("x6") = 0;

atomic_inc(&scm_call_count);
do {
asm volatile(
__asmeq("%0", R0_STR)
@@ -135,8 +134,6 @@ static int __scm_call_armv8_64(u64 x0, u64 x1, u64 x2, u64 x3, u64 x4, u64 x5,
"x14", "x15", "x16", "x17");
} while (r0 == SCM_INTERRUPTED);

atomic_dec(&scm_call_count);

if (ret1)
*ret1 = r1;
if (ret2)
@@ -147,7 +144,19 @@ static int __scm_call_armv8_64(u64 x0, u64 x1, u64 x2, u64 x3, u64 x4, u64 x5,
return r0;
}

static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
static int __scm_call_armv8_64(u64 x0, u64 x1, u64 x2, u64 x3, u64 x4, u64 x5,
u64 *ret1, u64 *ret2, u64 *ret3)
{
int ret;

atomic_inc(&scm_call_count);
ret = ___scm_call_armv8_64(x0, x1, x2, x3, x4, x5, ret1, ret2, ret3);
atomic_dec(&scm_call_count);

return ret;
}

static int ___scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
u64 *ret1, u64 *ret2, u64 *ret3)
{
register u32 r0 asm("w0") = w0;
@@ -158,7 +167,6 @@ static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
register u32 r5 asm("w5") = w5;
register u32 r6 asm("w6") = 0;

atomic_inc(&scm_call_count);
do {
asm volatile(
__asmeq("%0", R0_STR)
@@ -188,8 +196,6 @@ static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,

} while (r0 == SCM_INTERRUPTED);

atomic_dec(&scm_call_count);

if (ret1)
*ret1 = r1;
if (ret2)
@@ -200,6 +206,18 @@ static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
return r0;
}

static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
u64 *ret1, u64 *ret2, u64 *ret3)
{
int ret;

atomic_inc(&scm_call_count);
ret = ___scm_call_armv8_32(w0, w1, w2, w3, w4, w5, ret1, ret2, ret3);
atomic_dec(&scm_call_count);

return ret;
}

#else

static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
@@ -213,7 +231,6 @@ static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
register u32 r5 asm("r5") = w5;
register u32 r6 asm("r6") = 0;

atomic_inc(&scm_call_count);
do {
asm volatile(
__asmeq("%0", R0_STR)
@@ -241,8 +258,6 @@ static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,

} while (r0 == SCM_INTERRUPTED);

atomic_dec(&scm_call_count);

if (ret1)
*ret1 = r1;
if (ret2)
@@ -253,6 +268,18 @@ static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
return r0;
}

static int __scm_call_armv8_32(u32 w0, u32 w1, u32 w2, u32 w3, u32 w4, u32 w5,
u64 *ret1, u64 *ret2, u64 *ret3)
{
int ret;

atomic_inc(&scm_call_count);
ret = ___scm_call_armv8_32(w0, w1, w2, w3, w4, w5, ret1, ret2, ret3);
atomic_dec(&scm_call_count);

return ret;
}

static int __scm_call_armv8_64(u64 x0, u64 x1, u64 x2, u64 x3, u64 x4, u64 x5,
u64 *ret1, u64 *ret2, u64 *ret3)
{
