From: Fangrui Song <maskray@google.com>
commit ec9d78070de986ecf581ea204fd322af4d2477ec upstream.
Commit 39d114ddc682 ("arm64: add KASAN support") added .weak directives to arch/arm64/lib/mem*.S instead of changing the existing SYM_FUNC_START_PI macros. This can lead to the assembly snippet `.weak memcpy ... .globl memcpy`, which will produce a STB_WEAK memcpy with GNU as but a STB_GLOBAL memcpy with LLVM's integrated assembler before LLVM 12. LLVM 12 (since https://reviews.llvm.org/D90108) errors on such an overridden symbol binding.
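(Illustration, not part of the patch: the divergence can be reproduced with a standalone snippet; the file name and tool invocations below are examples only.)

	/* weak-then-global: GNU as keeps STB_WEAK, LLVM's integrated
	 * assembler before LLVM 12 emits STB_GLOBAL, and LLVM 12+
	 * rejects the changed binding with an error. */
	.weak memcpy
	.globl memcpy
memcpy:
	ret

Assembling this with `aarch64-linux-gnu-as` versus `clang --target=aarch64-linux-gnu -c` and inspecting the object with `readelf -s` shows the WEAK/GLOBAL mismatch described above.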
Use the appropriate SYM_FUNC_START_WEAK_PI instead.
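(Backport sketch: this branch predates the new-style SYM_FUNC_* annotations, so the fix below uses the generic WEAK() macro instead. From include/linux/linkage.h, approximately:

	#define ENTRY(name) \
		.globl name ASM_NL \
		ALIGN ASM_NL \
		name:

	#define WEAK(name) \
		.weak name ASM_NL \
		ALIGN ASM_NL \
		name:

Replacing the second ENTRY() with WEAK() means only `.weak` is ever emitted for the C-callable symbol, so neither assembler sees a `.weak`/`.globl` conflict.)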
Fixes: 39d114ddc682 ("arm64: add KASAN support")
Reported-by: Sami Tolvanen <samitolvanen@google.com>
Signed-off-by: Fangrui Song <maskray@google.com>
Tested-by: Sami Tolvanen <samitolvanen@google.com>
Tested-by: Nick Desaulniers <ndesaulniers@google.com>
Reviewed-by: Nick Desaulniers <ndesaulniers@google.com>
Cc: stable@vger.kernel.org
Link: https://lore.kernel.org/r/20201029181951.1866093-1-maskray@google.com
Signed-off-by: Will Deacon <will@kernel.org>
[nd: backport to adjust for missing:
 commit 3ac0f4526dfb ("arm64: lib: Use modern annotations for assembly functions")
 commit 35e61c77ef38 ("arm64: asm: Add new-style position independent function annotations")]
Signed-off-by: Nick Desaulniers <ndesaulniers@google.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
---
 arch/arm64/lib/memcpy.S  | 3 +--
 arch/arm64/lib/memmove.S | 3 +--
 arch/arm64/lib/memset.S  | 3 +--
 3 files changed, 3 insertions(+), 6 deletions(-)
diff --git a/arch/arm64/lib/memcpy.S b/arch/arm64/lib/memcpy.S
index 67613937711f..dfedd4ab1a76 100644
--- a/arch/arm64/lib/memcpy.S
+++ b/arch/arm64/lib/memcpy.S
@@ -68,9 +68,8 @@
 	stp \ptr, \regB, [\regC], \val
 	.endm
 
-	.weak memcpy
 ENTRY(__memcpy)
-ENTRY(memcpy)
+WEAK(memcpy)
 #include "copy_template.S"
 	ret
 ENDPIPROC(memcpy)
diff --git a/arch/arm64/lib/memmove.S b/arch/arm64/lib/memmove.S
index a5a4459013b1..e3de8f05c21a 100644
--- a/arch/arm64/lib/memmove.S
+++ b/arch/arm64/lib/memmove.S
@@ -57,9 +57,8 @@ C_h	.req	x12
 D_l	.req	x13
 D_h	.req	x14
 
-	.weak memmove
 ENTRY(__memmove)
-ENTRY(memmove)
+WEAK(memmove)
 	cmp	dstin, src
 	b.lo	__memcpy
 	add	tmp1, src, count
diff --git a/arch/arm64/lib/memset.S b/arch/arm64/lib/memset.S
index f2670a9f218c..316263c47c00 100644
--- a/arch/arm64/lib/memset.S
+++ b/arch/arm64/lib/memset.S
@@ -54,9 +54,8 @@ dst		.req	x8
 tmp3w		.req	w9
 tmp3		.req	x9
 
-	.weak memset
 ENTRY(__memset)
-ENTRY(memset)
+WEAK(memset)
 	mov	dst, dstin	/* Preserve return value.  */
 	and	A_lw, val, #255
 	orr	A_lw, A_lw, A_lw, lsl #8
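
(Verification hint, not part of the patch; the cross-toolchain prefix is an assumption:

	$ aarch64-linux-gnu-readelf -s arch/arm64/lib/memcpy.o | grep -w memcpy

should now report memcpy with WEAK binding under both GNU as and LLVM's integrated assembler, while __memcpy stays GLOBAL.)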