From: Michael Karcher <kernel@mkarcher.dialup.fu-berlin.de>

stable inclusion
from stable-v6.6.112
commit 674ff598148a28bae0b5372339de56f2abf0b1d1
category: bugfix
bugzilla: https://gitee.com/src-openeuler/kernel/issues/ID6B4C
CVE: CVE-2025-40126

Reference: https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git/commit/?id=...

--------------------------------

[ Upstream commit 4fba1713001195e59cfc001ff1f2837dab877efb ]

The referenced commit introduced exception handlers for user-space memory
references in copy_from_user and copy_to_user. These handlers return from
the respective function and calculate the remaining bytes to copy from the
current register contents. This commit fixes a couple of bad calculations.

This fixes the return value of copy_from_user and copy_to_user in the
faulting case. The behaviour of memcpy stays unchanged.

Fixes: cb736fdbb208 ("sparc64: Convert U1copy_{from,to}_user to accurate exception reporting.")
Tested-by: John Paul Adrian Glaubitz <glaubitz@physik.fu-berlin.de> # on QEMU 10.0.3
Tested-by: René Rebe <rene@exactcode.com> # on Ultra 5 UltraSparc IIi
Tested-by: Jonathan 'theJPster' Pallant <kernel@thejpster.org.uk> # on Sun Netra T1
Signed-off-by: Michael Karcher <kernel@mkarcher.dialup.fu-berlin.de>
Reviewed-by: Andreas Larsson <andreas@gaisler.com>
Link: https://lore.kernel.org/r/20250905-memcpy_series-v4-1-1ca72dda195b@mkarcher....
Signed-off-by: Andreas Larsson <andreas@gaisler.com>
Signed-off-by: Sasha Levin <sashal@kernel.org>
Signed-off-by: Zhang Xiaoxu <zhangxiaoxu5@huawei.com>
---
 arch/sparc/lib/U1memcpy.S | 19 ++++++++++---------
 1 file changed, 10 insertions(+), 9 deletions(-)

diff --git a/arch/sparc/lib/U1memcpy.S b/arch/sparc/lib/U1memcpy.S
index 635398ec7540..154fbd35400c 100644
--- a/arch/sparc/lib/U1memcpy.S
+++ b/arch/sparc/lib/U1memcpy.S
@@ -164,17 +164,18 @@ ENTRY(U1_gs_40_fp)
 	retl
 	 add	%o0, %o2, %o0
 ENDPROC(U1_gs_40_fp)
-ENTRY(U1_g3_0_fp)
-	VISExitHalf
-	retl
-	 add	%g3, %o2, %o0
-ENDPROC(U1_g3_0_fp)
 ENTRY(U1_g3_8_fp)
 	VISExitHalf
 	add	%g3, 8, %g3
 	retl
 	 add	%g3, %o2, %o0
 ENDPROC(U1_g3_8_fp)
+ENTRY(U1_g3_16_fp)
+	VISExitHalf
+	add	%g3, 16, %g3
+	retl
+	 add	%g3, %o2, %o0
+ENDPROC(U1_g3_16_fp)
 ENTRY(U1_o2_0_fp)
 	VISExitHalf
 	retl
@@ -547,18 +548,18 @@ FUNC_NAME:	/* %o0=dst, %o1=src, %o2=len */
 62:	FINISH_VISCHUNK(o0, f44, f46)
 63:	UNEVEN_VISCHUNK_LAST(o0, f46, f0)
 
-93:	EX_LD_FP(LOAD(ldd, %o1, %f2), U1_g3_0_fp)
+93:	EX_LD_FP(LOAD(ldd, %o1, %f2), U1_g3_8_fp)
 	add		%o1, 8, %o1
 	subcc		%g3, 8, %g3
 	faligndata	%f0, %f2, %f8
-	EX_ST_FP(STORE(std, %f8, %o0), U1_g3_8_fp)
+	EX_ST_FP(STORE(std, %f8, %o0), U1_g3_16_fp)
 	bl,pn		%xcc, 95f
 	 add		%o0, 8, %o0
-	EX_LD_FP(LOAD(ldd, %o1, %f0), U1_g3_0_fp)
+	EX_LD_FP(LOAD(ldd, %o1, %f0), U1_g3_8_fp)
 	add		%o1, 8, %o1
 	subcc		%g3, 8, %g3
 	faligndata	%f2, %f0, %f8
-	EX_ST_FP(STORE(std, %f8, %o0), U1_g3_8_fp)
+	EX_ST_FP(STORE(std, %f8, %o0), U1_g3_16_fp)
 	bge,pt		%xcc, 93b
 	 add		%o0, 8, %o0
-- 
2.34.1
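
Editor's note, a minimal C sketch (not part of the patch): it illustrates the
contract the fixed exception handlers must honour, namely that on a fault
copy_from_user()/copy_to_user() return the number of bytes that could not be
copied, which is the residual count the U1_g3_* handlers rebuild from %g3 and
%o2. The function name example_read and its buffers are hypothetical.

	#include <linux/uaccess.h>
	#include <linux/errno.h>

	/*
	 * Illustrative caller only: copy_from_user() returns the number of
	 * bytes it could NOT copy.  That residual is what the U1_g3_*
	 * exception handlers in the patch compute when a fault hits
	 * mid-copy, so a bad calculation there surfaces here as a wrong
	 * return value.
	 */
	static int example_read(void *kbuf, const void __user *ubuf, size_t len)
	{
		unsigned long not_copied = copy_from_user(kbuf, ubuf, len);

		if (not_copied)
			return -EFAULT;	/* last 'not_copied' bytes of kbuf were not written */
		return 0;
	}

Callers that act on the residual (partial-copy or short-write handling) depend
on this count being exact, which is why only the return value changes here and
the memcpy path is untouched.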