Date: Tue, 6 Apr 2021 11:29:47 GMT
From: Leandro Lupori <luporl@FreeBSD.org>
To: src-committers@FreeBSD.org, dev-commits-src-all@FreeBSD.org, dev-commits-src-branches@FreeBSD.org
Subject: git: 1805ce694542 - stable/13 - powerpc64: enforce natural alignment in bcopy
Message-ID: <202104061129.136BTlkO052805@gitrepo.freebsd.org>
The branch stable/13 has been updated by luporl:

URL: https://cgit.FreeBSD.org/src/commit/?id=1805ce694542f03a95a064aa3be5905ce9cffa3b

commit 1805ce694542f03a95a064aa3be5905ce9cffa3b
Author:     Leandro Lupori <luporl@FreeBSD.org>
AuthorDate: 2021-03-25 14:54:06 +0000
Commit:     Leandro Lupori <luporl@FreeBSD.org>
CommitDate: 2021-04-06 11:28:26 +0000

    powerpc64: enforce natural alignment in bcopy

    POWER architecture CPUs (Book-S) require natural alignment for
    cache-inhibited storage accesses. Since we can't know the caching
    model for a page ahead of time, always enforce natural alignment
    in bcopy. This fixes a SIGBUS when calling the function with
    misaligned pointers on POWER7.

    Submitted by:	Bruno Larsen <bruno.larsen@eldorado.org.br>
    Reviewed by:	luporl, bdragon (IRC)
    MFC after:	1 week
    Sponsored by:	Eldorado Research Institute (eldorado.org.br)
    Differential Revision:	https://reviews.freebsd.org/D28776

    (cherry picked from commit 2f561284033c0f53d0911baf9056078e6026a278)
---
 lib/libc/powerpc64/string/bcopy.S | 34 ++++++++++++++++++++++++++++++++++
 1 file changed, 34 insertions(+)

diff --git a/lib/libc/powerpc64/string/bcopy.S b/lib/libc/powerpc64/string/bcopy.S
index bb860c098feb..4dc80c264362 100644
--- a/lib/libc/powerpc64/string/bcopy.S
+++ b/lib/libc/powerpc64/string/bcopy.S
@@ -34,6 +34,11 @@ __FBSDID("$FreeBSD$");
 #define BLOCK_SIZE	(1 << BLOCK_SIZE_BITS)
 #define BLOCK_SIZE_MASK	(BLOCK_SIZE - 1)
 
+/* Minimum 8 byte alignment, to avoid cache-inhibited alignment faults.*/
+#ifndef ALIGN_MASK
+#define ALIGN_MASK	0x7
+#endif
+
 #define MULTI_PHASE_THRESHOLD	512
 
 #ifndef FN_NAME
@@ -66,9 +71,38 @@ ENTRY(FN_NAME)
 	mr	%r4, %r0
 #endif
 
+	/* First check for relative alignment, if unaligned copy one byte at a time */
+	andi.	%r8, %r3, ALIGN_MASK
+	andi.	%r7, %r4, ALIGN_MASK
+	cmpd	%r7, %r8
+	bne	.Lunaligned
+
+
 	cmpldi	%r5, MULTI_PHASE_THRESHOLD
 	bge	.Lmulti_phase
+	b	.Lfast_copy
+
+.Lunaligned:
+	/* forward or backward copy? */
+	cmpd	%r4, %r3
+	blt	.Lbackward_unaligned
+
+	/* Just need to setup increment and jump to copy */
+	li	%r0, 1
+	mtctr	%r5
+	b	.Lsingle_1_loop
+
+.Lbackward_unaligned:
+	/* advance src and dst to last byte, set decrement and jump to copy */
+	add	%r3, %r3, %r5
+	addi	%r3, %r3, -1
+	add	%r4, %r4, %r5
+	addi	%r4, %r4, -1
+	li	%r0, -1
+	mtctr	%r5
+	b	.Lsingle_1_loop
 
+.Lfast_copy:
 	/* align src */
 	cmpd	%r4, %r3	/* forward or backward copy? */
 	blt	.Lbackward_align
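For readers skimming the patch, the added check boils down to comparing the
low three bits of the two pointers. Below is a rough C rendering of that
logic, for illustration only: the real implementation is the PPC64 assembly
above, the helper name copy_sketch is made up, and the dst/src roles follow
my reading of the registers in the diff (%r3 = dst, %r4 = src, %r5 = len,
the usual memcpy convention in this file).

#include <stddef.h>
#include <stdint.h>

#define ALIGN_MASK	0x7	/* natural (8-byte) alignment, as in the patch */

/* Hypothetical C sketch of the check bcopy.S now performs. */
void
copy_sketch(void *dst, const void *src, size_t len)
{
	uint8_t *d = dst;
	const uint8_t *s = src;

	/*
	 * If dst and src sit at different offsets within an 8-byte
	 * doubleword, no head/tail fixup can make the wide loads and
	 * stores aligned at the same time, and a misaligned doubleword
	 * access to cache-inhibited memory raises an alignment fault
	 * (the SIGBUS seen on POWER7).  Copy one byte at a time instead.
	 */
	if (((uintptr_t)d & ALIGN_MASK) != ((uintptr_t)s & ALIGN_MASK)) {
		if (s < d) {		/* overlap-safe backward copy */
			d += len;
			s += len;
			while (len--)
				*--d = *--s;
		} else {		/* forward copy */
			while (len--)
				*d++ = *s++;
		}
		return;
	}

	/* ... otherwise fall through to the existing aligned fast path ... */
}

Note that the test is for relative alignment: when (dst - src) is a multiple
of 8, the existing code can align both pointers with a short byte prologue
and then use doubleword accesses, so only the mutually misaligned case pays
the byte-copy penalty.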