From e1b1da103ceee2db4497e74c3376c26de197df4f Mon Sep 17 00:00:00 2001
From: Diab Neiroukh
Date: Sat, 5 Dec 2020 22:10:40 +0000
Subject: [PATCH] arm64: Adjust mem*.S to use WEAK instead of .weak.

Commit 39d114ddc682 ("arm64: add KASAN support") added .weak directives
to arch/arm64/lib/mem*.S to support KASAN on AArch64. However, this
breaks the build with LLVM's integrated assembler: since [1], the
.globl directive emitted by the subsequent ENTRY() overrides the
earlier .weak, so the symbols receive a STB_GLOBAL binding rather than
STB_WEAK. By utilising the WEAK macro instead, both LLVM and GCC
produce a STB_WEAK binding.

[1]: https://reviews.llvm.org/D90108

Link: https://github.com/ClangBuiltLinux/linux/issues/1190
Signed-off-by: Diab Neiroukh
---
 arch/arm64/lib/memcpy.S  | 3 +--
 arch/arm64/lib/memmove.S | 3 +--
 arch/arm64/lib/memset.S  | 3 +--
 3 files changed, 3 insertions(+), 6 deletions(-)

diff --git a/arch/arm64/lib/memcpy.S b/arch/arm64/lib/memcpy.S
index 67613937711f..dfedd4ab1a76 100644
--- a/arch/arm64/lib/memcpy.S
+++ b/arch/arm64/lib/memcpy.S
@@ -68,9 +68,8 @@
 	stp \ptr, \regB, [\regC], \val
 	.endm
 
-	.weak memcpy
 ENTRY(__memcpy)
-ENTRY(memcpy)
+WEAK(memcpy)
 #include "copy_template.S"
 	ret
 ENDPIPROC(memcpy)
diff --git a/arch/arm64/lib/memmove.S b/arch/arm64/lib/memmove.S
index f5ac945d2a65..d2dadccb62c5 100644
--- a/arch/arm64/lib/memmove.S
+++ b/arch/arm64/lib/memmove.S
@@ -57,9 +57,8 @@ C_h	.req	x12
 D_l	.req	x13
 D_h	.req	x14
 
-	.weak memmove
 ENTRY(__memmove)
-ENTRY(memmove)
+WEAK(memmove)
 	prfm	pldl1strm, [src, #L1_CACHE_BYTES]
 	cmp	dstin, src
 	b.lo	__memcpy
diff --git a/arch/arm64/lib/memset.S b/arch/arm64/lib/memset.S
index f2670a9f218c..316263c47c00 100644
--- a/arch/arm64/lib/memset.S
+++ b/arch/arm64/lib/memset.S
@@ -54,9 +54,8 @@ dst		.req	x8
 tmp3w		.req	w9
 tmp3		.req	x9
 
-	.weak memset
 ENTRY(__memset)
-ENTRY(memset)
+WEAK(memset)
 	mov	dst, dstin	/* Preserve return value.  */
 	and	A_lw, val, #255
 	orr	A_lw, A_lw, A_lw, lsl #8
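
A minimal standalone sketch of the assembler behaviour this patch works
around (illustrative only; the file and symbol names below are
hypothetical, and the ENTRY()/WEAK() expansions are paraphrased from
include/linux/linkage.h):

	/* weak-demo.S: roughly what ".weak foo" followed by ENTRY(foo)
	 * expanded to before this patch. GNU as keeps the symbol
	 * STB_WEAK here, but LLVM's integrated assembler, after
	 * D90108, honours the later .globl and emits STB_GLOBAL.
	 */
	.weak	memcpy_demo
	.globl	memcpy_demo	/* emitted by ENTRY(); WEAK() omits this */
memcpy_demo:
	ret

Assembling this with "clang --target=aarch64-linux-gnu -c weak-demo.S"
and running "llvm-readelf -s weak-demo.o" shows GLOBAL in the Bind
column for memcpy_demo; dropping the .globl line, as switching to the
WEAK() macro effectively does, yields WEAK from both assemblers.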